#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Multi arch dockerized build tool."""

__author__ = 'Pedro Larroy'
__version__ = '0.1'

import os
import sys
import subprocess
import logging
import argparse
from subprocess import check_call
import glob
import re


class CmdResult(object):
    def __init__(self, std_out, std_err, status_code):
        self.std_out = std_out
        self.std_err = std_err
        self.status_code = status_code if status_code is not None else 0

    def __str__(self):
        return "%s, %s, %s" % (self.std_out, self.std_err, self.status_code)


def run(cmd, fail_on_error=True):
    logging.debug("executing shell command:\n" + cmd)
    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    std_out, std_err = proc.communicate()
    if fail_on_error:
        if proc.returncode != 0:
            logging.warning('Error running command: {}'.format(cmd))
        assert proc.returncode == 0, std_err
    res = CmdResult(std_out.decode('utf-8'), std_err.decode('utf-8'), proc.returncode)
    return res


def mkdir_p(d):
    rev_path_list = list()
    head = d
    while len(head) and head != os.sep:
        rev_path_list.append(head)
        (head, tail) = os.path.split(head)

    rev_path_list.reverse()
    for p in rev_path_list:
        try:
            os.mkdir(p)
        except OSError as e:
            if e.errno != 17:  # errno 17 == EEXIST: directory already exists
                raise


def get_arches():
    """Get a list of architectures given our dockerfiles"""
    dockerfiles = glob.glob("Dockerfile.build.*")
    # skip editor backup files (trailing '~')
    dockerfiles = list(filter(lambda x: x[-1] != '~', dockerfiles))
    arches = list(map(lambda x: re.sub(r"Dockerfile.build.(.*)", r"\1", x), dockerfiles))
    arches.sort()
    return arches


def sync_source():
    logging.info("Copying sources")
    check_call(["rsync", "-a", "--delete", "--exclude=\".git/\"", '--exclude=/docker_multiarch/', "../", "mxnet"])


def get_docker_tag(arch):
    return "mxnet.build.{0}".format(arch)


def get_dockerfile(arch):
    return "Dockerfile.build.{0}".format(arch)


def build(arch):
    """Build the given architecture in the container"""
    assert arch in get_arches(), "No such architecture {0}, Dockerfile.build.{0} not found".format(arch)
    logging.info("Building for target platform {0}".format(arch))
    check_call(["docker", "build",
                "-f", get_dockerfile(arch),
                "-t", get_docker_tag(arch),
                "."])


def collect_artifacts(arch):
    """Collects the artifacts built inside the docker container to the local fs"""
    def artifact_path(arch):
        return "{}/build/{}".format(os.getcwd(), arch)
    logging.info("Collect artifacts from build in {0}".format(artifact_path(arch)))
    mkdir_p("build/{}".format(arch))

    # Mount artifact_path on /$arch inside the container and copy the build output so we can access
    # locally from the host fs
    check_call(["docker", "run",
                "-v", "{}:/{}".format(artifact_path(arch), arch),
                get_docker_tag(arch),
                "bash", "-c", "cp -r /work/build/* /{}".format(arch)])


def main():
    logging.getLogger().setLevel(logging.INFO)
    logging.basicConfig(format='%(asctime)-15s %(message)s')

    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--arch",
                        help="Architecture",
                        type=str)

    parser.add_argument("-l", "--list_arch",
                        help="List architectures",
                        action='store_true')
    args = parser.parse_args()

    if args.list_arch:
        arches = get_arches()
        print(arches)

    elif args.arch:
        sync_source()
        build(args.arch)
        collect_artifacts(args.arch)

    else:
        arches = get_arches()
        logging.info("Building for all architectures: {}".format(arches))
        logging.info("Artifacts will be produced in the build/ directory.")
        sync_source()
        for arch in arches:
            build(arch)
            collect_artifacts(arch)

    return 0


if __name__ == '__main__':
    sys.exit(main())
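A minimal usage sketch for the helpers above (assuming the script is importable as a module, here hypothetically named `build_tool`; the echoed command is illustrative):

    from build_tool import run, get_arches  # hypothetical module name

    res = run("echo hello")        # captures stdout/stderr, asserts exit code 0
    print(res.std_out.strip())     # -> hello
    print(res.status_code)         # -> 0
    print(get_arches())            # arch suffixes of any Dockerfile.build.* files present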
"""Support to select a date and/or a time."""NEWLINEimport datetimeNEWLINEimport loggingNEWLINEimport typingNEWLINENEWLINEimport voluptuous as volNEWLINENEWLINEfrom homeassistant.const import (NEWLINE ATTR_DATE,NEWLINE ATTR_EDITABLE,NEWLINE ATTR_TIME,NEWLINE CONF_ICON,NEWLINE CONF_ID,NEWLINE CONF_NAME,NEWLINE SERVICE_RELOAD,NEWLINE)NEWLINEfrom homeassistant.core import callbackNEWLINEfrom homeassistant.helpers import collectionNEWLINEimport homeassistant.helpers.config_validation as cvNEWLINEfrom homeassistant.helpers.entity_component import EntityComponentNEWLINEfrom homeassistant.helpers.restore_state import RestoreEntityNEWLINEimport homeassistant.helpers.serviceNEWLINEfrom homeassistant.helpers.storage import StoreNEWLINEfrom homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceCallTypeNEWLINEfrom homeassistant.util import dt as dt_utilNEWLINENEWLINE_LOGGER = logging.getLogger(__name__)NEWLINENEWLINEDOMAIN = "input_datetime"NEWLINENEWLINECONF_HAS_DATE = "has_date"NEWLINECONF_HAS_TIME = "has_time"NEWLINECONF_INITIAL = "initial"NEWLINENEWLINEDEFAULT_VALUE = "1970-01-01 00:00:00"NEWLINEDEFAULT_DATE = datetime.date(1970, 1, 1)NEWLINEDEFAULT_TIME = datetime.time(0, 0, 0)NEWLINENEWLINEATTR_DATETIME = "datetime"NEWLINENEWLINESERVICE_SET_DATETIME = "set_datetime"NEWLINESTORAGE_KEY = DOMAINNEWLINESTORAGE_VERSION = 1NEWLINENEWLINECREATE_FIELDS = {NEWLINE vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),NEWLINE vol.Optional(CONF_HAS_DATE, default=False): cv.boolean,NEWLINE vol.Optional(CONF_HAS_TIME, default=False): cv.boolean,NEWLINE vol.Optional(CONF_ICON): cv.icon,NEWLINE vol.Optional(CONF_INITIAL): cv.string,NEWLINE}NEWLINEUPDATE_FIELDS = {NEWLINE vol.Optional(CONF_NAME): cv.string,NEWLINE vol.Optional(CONF_HAS_DATE): cv.boolean,NEWLINE vol.Optional(CONF_HAS_TIME): cv.boolean,NEWLINE vol.Optional(CONF_ICON): cv.icon,NEWLINE vol.Optional(CONF_INITIAL): cv.string,NEWLINE}NEWLINENEWLINENEWLINEdef has_date_or_time(conf):NEWLINE """Check at least date or time is true."""NEWLINE if conf[CONF_HAS_DATE] or conf[CONF_HAS_TIME]:NEWLINE return confNEWLINENEWLINE raise vol.Invalid("Entity needs at least a date or a time")NEWLINENEWLINENEWLINECONFIG_SCHEMA = vol.Schema(NEWLINE {NEWLINE DOMAIN: cv.schema_with_slug_keys(NEWLINE vol.All(NEWLINE {NEWLINE vol.Optional(CONF_NAME): cv.string,NEWLINE vol.Optional(CONF_HAS_DATE, default=False): cv.boolean,NEWLINE vol.Optional(CONF_HAS_TIME, default=False): cv.boolean,NEWLINE vol.Optional(CONF_ICON): cv.icon,NEWLINE vol.Optional(CONF_INITIAL): cv.string,NEWLINE },NEWLINE has_date_or_time,NEWLINE )NEWLINE )NEWLINE },NEWLINE extra=vol.ALLOW_EXTRA,NEWLINE)NEWLINERELOAD_SERVICE_SCHEMA = vol.Schema({})NEWLINENEWLINENEWLINEasync def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:NEWLINE """Set up an input datetime."""NEWLINE component = EntityComponent(_LOGGER, DOMAIN, hass)NEWLINE id_manager = collection.IDManager()NEWLINENEWLINE yaml_collection = collection.YamlCollection(NEWLINE logging.getLogger(f"{__name__}.yaml_collection"), id_managerNEWLINE )NEWLINE collection.attach_entity_component_collection(NEWLINE component, yaml_collection, InputDatetime.from_yamlNEWLINE )NEWLINENEWLINE storage_collection = DateTimeStorageCollection(NEWLINE Store(hass, STORAGE_VERSION, STORAGE_KEY),NEWLINE logging.getLogger(f"{__name__}.storage_collection"),NEWLINE id_manager,NEWLINE )NEWLINE collection.attach_entity_component_collection(NEWLINE component, storage_collection, InputDatetimeNEWLINE )NEWLINENEWLINE await 
yaml_collection.async_load(NEWLINE [{CONF_ID: id_, **cfg} for id_, cfg in config.get(DOMAIN, {}).items()]NEWLINE )NEWLINE await storage_collection.async_load()NEWLINENEWLINE collection.StorageCollectionWebsocket(NEWLINE storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDSNEWLINE ).async_setup(hass)NEWLINENEWLINE collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, yaml_collection)NEWLINE collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, storage_collection)NEWLINENEWLINE async def reload_service_handler(service_call: ServiceCallType) -> None:NEWLINE """Reload yaml entities."""NEWLINE conf = await component.async_prepare_reload(skip_reset=True)NEWLINE if conf is None:NEWLINE conf = {DOMAIN: {}}NEWLINE await yaml_collection.async_load(NEWLINE [{CONF_ID: id_, **cfg} for id_, cfg in conf.get(DOMAIN, {}).items()]NEWLINE )NEWLINENEWLINE homeassistant.helpers.service.async_register_admin_service(NEWLINE hass,NEWLINE DOMAIN,NEWLINE SERVICE_RELOAD,NEWLINE reload_service_handler,NEWLINE schema=RELOAD_SERVICE_SCHEMA,NEWLINE )NEWLINENEWLINE async def async_set_datetime_service(entity, call):NEWLINE """Handle a call to the input datetime 'set datetime' service."""NEWLINE time = call.data.get(ATTR_TIME)NEWLINE date = call.data.get(ATTR_DATE)NEWLINE dttm = call.data.get(ATTR_DATETIME)NEWLINE if (NEWLINE dttmNEWLINE and (date or time)NEWLINE or entity.has_dateNEWLINE and not (date or dttm)NEWLINE or entity.has_timeNEWLINE and not (time or dttm)NEWLINE ):NEWLINE _LOGGER.error(NEWLINE "Invalid service data for %s input_datetime.set_datetime: %s",NEWLINE entity.entity_id,NEWLINE str(call.data),NEWLINE )NEWLINE returnNEWLINENEWLINE if dttm:NEWLINE date = dttm.date()NEWLINE time = dttm.time()NEWLINE entity.async_set_datetime(date, time)NEWLINENEWLINE component.async_register_entity_service(NEWLINE SERVICE_SET_DATETIME,NEWLINE {NEWLINE vol.Optional(ATTR_DATE): cv.date,NEWLINE vol.Optional(ATTR_TIME): cv.time,NEWLINE vol.Optional(ATTR_DATETIME): cv.datetime,NEWLINE },NEWLINE async_set_datetime_service,NEWLINE )NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINEclass DateTimeStorageCollection(collection.StorageCollection):NEWLINE """Input storage based collection."""NEWLINENEWLINE CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, has_date_or_time))NEWLINE UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)NEWLINENEWLINE async def _process_create_data(self, data: typing.Dict) -> typing.Dict:NEWLINE """Validate the config is valid."""NEWLINE return self.CREATE_SCHEMA(data)NEWLINENEWLINE @callbackNEWLINE def _get_suggested_id(self, info: typing.Dict) -> str:NEWLINE """Suggest an ID based on the config."""NEWLINE return info[CONF_NAME]NEWLINENEWLINE async def _update_data(self, data: dict, update_data: typing.Dict) -> typing.Dict:NEWLINE """Return a new updated data object."""NEWLINE update_data = self.UPDATE_SCHEMA(update_data)NEWLINE return has_date_or_time({**data, **update_data})NEWLINENEWLINENEWLINEclass InputDatetime(RestoreEntity):NEWLINE """Representation of a datetime input."""NEWLINENEWLINE def __init__(self, config: typing.Dict) -> None:NEWLINE """Initialize a select input."""NEWLINE self._config = configNEWLINE self.editable = TrueNEWLINE self._current_datetime = NoneNEWLINE initial = config.get(CONF_INITIAL)NEWLINE if initial:NEWLINE if self.has_date and self.has_time:NEWLINE self._current_datetime = dt_util.parse_datetime(initial)NEWLINE elif self.has_date:NEWLINE date = dt_util.parse_date(initial)NEWLINE self._current_datetime = datetime.datetime.combine(date, DEFAULT_TIME)NEWLINE 
else:NEWLINE time = dt_util.parse_time(initial)NEWLINE self._current_datetime = datetime.datetime.combine(DEFAULT_DATE, time)NEWLINENEWLINE @classmethodNEWLINE def from_yaml(cls, config: typing.Dict) -> "InputDatetime":NEWLINE """Return entity instance initialized from yaml storage."""NEWLINE input_dt = cls(config)NEWLINE input_dt.entity_id = f"{DOMAIN}.{config[CONF_ID]}"NEWLINE input_dt.editable = FalseNEWLINE return input_dtNEWLINENEWLINE async def async_added_to_hass(self):NEWLINE """Run when entity about to be added."""NEWLINE await super().async_added_to_hass()NEWLINENEWLINE # Priority 1: Initial valueNEWLINE if self.state is not None:NEWLINE returnNEWLINENEWLINE # Priority 2: Old stateNEWLINE old_state = await self.async_get_last_state()NEWLINE if old_state is None:NEWLINE self._current_datetime = dt_util.parse_datetime(DEFAULT_VALUE)NEWLINE returnNEWLINENEWLINE if self.has_date and self.has_time:NEWLINE date_time = dt_util.parse_datetime(old_state.state)NEWLINE if date_time is None:NEWLINE self._current_datetime = dt_util.parse_datetime(DEFAULT_VALUE)NEWLINE returnNEWLINE self._current_datetime = date_timeNEWLINE elif self.has_date:NEWLINE date = dt_util.parse_date(old_state.state)NEWLINE if date is None:NEWLINE self._current_datetime = dt_util.parse_datetime(DEFAULT_VALUE)NEWLINE returnNEWLINE self._current_datetime = datetime.datetime.combine(date, DEFAULT_TIME)NEWLINE else:NEWLINE time = dt_util.parse_time(old_state.state)NEWLINE if time is None:NEWLINE self._current_datetime = dt_util.parse_datetime(DEFAULT_VALUE)NEWLINE returnNEWLINE self._current_datetime = datetime.datetime.combine(DEFAULT_DATE, time)NEWLINENEWLINE @propertyNEWLINE def should_poll(self):NEWLINE """If entity should be polled."""NEWLINE return FalseNEWLINENEWLINE @propertyNEWLINE def name(self):NEWLINE """Return the name of the select input."""NEWLINE return self._config.get(CONF_NAME)NEWLINENEWLINE @propertyNEWLINE def has_date(self) -> bool:NEWLINE """Return True if entity has date."""NEWLINE return self._config[CONF_HAS_DATE]NEWLINENEWLINE @propertyNEWLINE def has_time(self) -> bool:NEWLINE """Return True if entity has time."""NEWLINE return self._config[CONF_HAS_TIME]NEWLINENEWLINE @propertyNEWLINE def icon(self):NEWLINE """Return the icon to be used for this entity."""NEWLINE return self._config.get(CONF_ICON)NEWLINENEWLINE @propertyNEWLINE def state(self):NEWLINE """Return the state of the component."""NEWLINE if self._current_datetime is None:NEWLINE return NoneNEWLINENEWLINE if self.has_date and self.has_time:NEWLINE return self._current_datetimeNEWLINE if self.has_date:NEWLINE return self._current_datetime.date()NEWLINE return self._current_datetime.time()NEWLINENEWLINE @propertyNEWLINE def state_attributes(self):NEWLINE """Return the state attributes."""NEWLINE attrs = {NEWLINE ATTR_EDITABLE: self.editable,NEWLINE CONF_HAS_DATE: self.has_date,NEWLINE CONF_HAS_TIME: self.has_time,NEWLINE }NEWLINENEWLINE if self._current_datetime is None:NEWLINE return attrsNEWLINENEWLINE if self.has_date and self._current_datetime is not None:NEWLINE attrs["year"] = self._current_datetime.yearNEWLINE attrs["month"] = self._current_datetime.monthNEWLINE attrs["day"] = self._current_datetime.dayNEWLINENEWLINE if self.has_time and self._current_datetime is not None:NEWLINE attrs["hour"] = self._current_datetime.hourNEWLINE attrs["minute"] = self._current_datetime.minuteNEWLINE attrs["second"] = self._current_datetime.secondNEWLINENEWLINE if not self.has_date:NEWLINE attrs["timestamp"] = (NEWLINE 
self._current_datetime.hour * 3600NEWLINE + self._current_datetime.minute * 60NEWLINE + self._current_datetime.secondNEWLINE )NEWLINE elif not self.has_time:NEWLINE extended = datetime.datetime.combine(NEWLINE self._current_datetime, datetime.time(0, 0)NEWLINE )NEWLINE attrs["timestamp"] = extended.timestamp()NEWLINE else:NEWLINE attrs["timestamp"] = self._current_datetime.timestamp()NEWLINENEWLINE return attrsNEWLINENEWLINE @propertyNEWLINE def unique_id(self) -> typing.Optional[str]:NEWLINE """Return unique id of the entity."""NEWLINE return self._config[CONF_ID]NEWLINENEWLINE @callbackNEWLINE def async_set_datetime(self, date_val, time_val):NEWLINE """Set a new date / time."""NEWLINE if self.has_date and self.has_time and date_val and time_val:NEWLINE self._current_datetime = datetime.datetime.combine(date_val, time_val)NEWLINE elif self.has_date and not self.has_time and date_val:NEWLINE self._current_datetime = datetime.datetime.combine(NEWLINE date_val, self._current_datetime.time()NEWLINE )NEWLINE if self.has_time and not self.has_date and time_val:NEWLINE self._current_datetime = datetime.datetime.combine(NEWLINE self._current_datetime.date(), time_valNEWLINE )NEWLINENEWLINE self.async_write_ha_state()NEWLINENEWLINE async def async_update_config(self, config: typing.Dict) -> None:NEWLINE """Handle when the config is updated."""NEWLINE self._config = configNEWLINE self.async_write_ha_state()NEWLINE |
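The `has_date_or_time` validator above can be exercised in isolation; a short sketch (requires `voluptuous`; assumes `has_date_or_time` from the module above is in scope, and the config dicts are illustrative):

    import voluptuous as vol

    conf = {"has_date": True, "has_time": False}
    assert has_date_or_time(conf) is conf      # passes through unchanged
    try:
        has_date_or_time({"has_date": False, "has_time": False})
    except vol.Invalid as err:
        print(err)                             # Entity needs at least a date or a time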
from abc import ABC
import logging

import ci_sdr
import fast_bss_eval
import torch


from espnet2.enh.loss.criterions.abs_loss import AbsEnhLoss


class TimeDomainLoss(AbsEnhLoss, ABC):
    """Base class for all time-domain Enhancement loss modules."""

    pass


EPS = torch.finfo(torch.get_default_dtype()).eps


class CISDRLoss(TimeDomainLoss):
    """CI-SDR loss

    Reference:
        Convolutive Transfer Function Invariant SDR Training
        Criteria for Multi-Channel Reverberant Speech Separation;
        C. Boeddeker et al., 2021;
        https://arxiv.org/abs/2011.15003
    Args:
        ref: (Batch, samples)
        inf: (Batch, samples)
        filter_length (int): a time-invariant filter that allows
            slight distortion via filtering
    Returns:
        loss: (Batch,)
    """

    def __init__(self, filter_length=512, name=None):
        super().__init__()
        self.filter_length = filter_length

        self._name = "ci_sdr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        inf: torch.Tensor,
    ) -> torch.Tensor:

        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        return ci_sdr.pt.ci_sdr_loss(
            inf, ref, compute_permutation=False, filter_length=self.filter_length
        )


class SNRLoss(TimeDomainLoss):
    def __init__(self, eps=EPS, name=None):
        super().__init__()
        self.eps = float(eps)

        self._name = "snr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        inf: torch.Tensor,
    ) -> torch.Tensor:
        # the return tensor should be shape of (batch,)

        noise = inf - ref

        snr = 20 * (
            torch.log10(torch.norm(ref, p=2, dim=1).clamp(min=self.eps))
            - torch.log10(torch.norm(noise, p=2, dim=1).clamp(min=self.eps))
        )
        return -snr


class SDRLoss(TimeDomainLoss):
    """SDR loss.

    filter_length: int
        The length of the distortion filter allowed (default: ``512``)
    use_cg_iter:
        If provided, an iterative method is used to solve for the distortion
        filter coefficients instead of direct Gaussian elimination.
        This can speed up the computation of the metrics in case the filters
        are long. Using a value of 10 here has been shown to provide
        good accuracy in most cases and is sufficient when using this
        loss to train neural separation networks.
    clamp_db: float
        clamp the output value in [-clamp_db, clamp_db]
    zero_mean: bool
        When set to True, the mean of all signals is subtracted prior.
    load_diag:
        If provided, this small value is added to the diagonal coefficients of
        the system metrics when solving for the filter coefficients.
        This can help stabilize the metric in the case where some of the reference
        signals may sometimes be zero
    """

    def __init__(
        self,
        filter_length=512,
        use_cg_iter=None,
        clamp_db=None,
        zero_mean=True,
        load_diag=None,
        name=None,
    ):
        super().__init__()

        self.filter_length = filter_length
        self.use_cg_iter = use_cg_iter
        self.clamp_db = clamp_db
        self.zero_mean = zero_mean
        self.load_diag = load_diag

        self._name = "sdr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        est: torch.Tensor,
    ) -> torch.Tensor:
        """SDR forward.

        Args:
            ref: Tensor, (..., n_samples)
                reference signal
            est: Tensor (..., n_samples)
                estimated signal

        Returns:
            loss: (...,)
                the SDR loss (negative sdr)
        """

        sdr_loss = fast_bss_eval.sdr_loss(
            est=est,
            ref=ref,
            filter_length=self.filter_length,
            use_cg_iter=self.use_cg_iter,
            zero_mean=self.zero_mean,
            clamp_db=self.clamp_db,
            load_diag=self.load_diag,
            pairwise=False,
        )

        return sdr_loss


class SISNRLoss(TimeDomainLoss):
    """SI-SNR (or named SI-SDR) loss

    A more stable SI-SNR loss with clamp from `fast_bss_eval`.

    Attributes:
        clamp_db: float
            clamp the output value in [-clamp_db, clamp_db]
        zero_mean: bool
            When set to True, the mean of all signals is subtracted prior.
        eps: float
            Deprecated. Kept for compatibility.
    """

    def __init__(self, clamp_db=None, zero_mean=True, eps=None, name=None):
        super().__init__()
        self.clamp_db = clamp_db
        self.zero_mean = zero_mean
        if eps is not None:
            logging.warning("Eps is deprecated in si_snr loss, set clamp_db instead.")

        self._name = "si_snr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        est: torch.Tensor,
    ) -> torch.Tensor:
        """SI-SNR forward.

        Args:
            ref: Tensor, (..., n_samples)
                reference signal
            est: Tensor (..., n_samples)
                estimated signal

        Returns:
            loss: (...,)
                the SI-SDR loss (negative si-sdr)
        """

        si_snr = fast_bss_eval.si_sdr_loss(
            est=est,
            ref=ref,
            zero_mean=self.zero_mean,
            clamp_db=self.clamp_db,
            pairwise=False,
        )

        return si_snr


class TimeDomainMSE(TimeDomainLoss):
    def __init__(self, name=None):
        super().__init__()
        self._name = "TD_MSE_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(self, ref, inf) -> torch.Tensor:
        """Time-domain MSE loss forward.

        Args:
            ref: (Batch, T) or (Batch, T, C)
            inf: (Batch, T) or (Batch, T, C)
        Returns:
            loss: (Batch,)
        """
        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        mseloss = (ref - inf).pow(2)
        if ref.dim() == 3:
            mseloss = mseloss.mean(dim=[1, 2])
        elif ref.dim() == 2:
            mseloss = mseloss.mean(dim=1)
        else:
            raise ValueError(
                "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
            )
        return mseloss


class TimeDomainL1(TimeDomainLoss):
    def __init__(self, name=None):
        super().__init__()
        self._name = "TD_L1_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(self, ref, inf) -> torch.Tensor:
        """Time-domain L1 loss forward.

        Args:
            ref: (Batch, T) or (Batch, T, C)
            inf: (Batch, T) or (Batch, T, C)
        Returns:
            loss: (Batch,)
        """
        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        l1loss = abs(ref - inf)
        if ref.dim() == 3:
            l1loss = l1loss.mean(dim=[1, 2])
        elif ref.dim() == 2:
            l1loss = l1loss.mean(dim=1)
        else:
            raise ValueError(
                "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
            )
        return l1loss
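A shape-level sketch of how these loss modules are called (random tensors stand in for audio; assumes `AbsEnhLoss` is a `torch.nn.Module` so instances are callable):

    import torch

    ref = torch.randn(4, 16000)               # (Batch, samples) reference
    est = ref + 0.1 * torch.randn(4, 16000)   # noisy estimate
    loss = SNRLoss()(ref, est)                # negative SNR in dB
    print(loss.shape)                         # torch.Size([4]), one value per utterance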
from decimal import Decimal
from typing import Optional

import json
import requests

from cypherpunkpay.prices.price_source import PriceSource


class CwatchCoinPriceSource(PriceSource):

    def get(self, coin: str, fiat: str) -> Optional[Decimal]:
        if fiat != 'usd':
            raise Exception(f'Unsupported fiat {fiat}')
        try:
            parsed_json = self._http_client.get_accepting_linkability('https://billboard.service.cryptowat.ch/assets?quote=usd&limit=50&sort=marketcap').json()
        except requests.exceptions.RequestException:
            return None
        except json.decoder.JSONDecodeError:
            return None

        coins_json = parsed_json['result']['rows']
        for coin_json in coins_json:
            if coin_json['symbol'].casefold() == coin:
                return Decimal(coin_json['price'])
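The symbol-matching loop above, sketched against a stubbed payload instead of a live HTTP call (payload shape inferred from the code):

    from decimal import Decimal

    parsed_json = {"result": {"rows": [{"symbol": "BTC", "price": "64123.5"}]}}
    coin = "btc"
    for row in parsed_json["result"]["rows"]:
        if row["symbol"].casefold() == coin:
            print(Decimal(row["price"]))      # Decimal('64123.5')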
def map_list(items, key, default=None):
    # Pull `key` out of each dict, dropping entries whose value is falsey.
    # (Parameter renamed from `list` to avoid shadowing the builtin.)
    return filter(None, (item.get(key, default) for item in items))


class FilterModule(object):
    def filters(self):
        return {
            'select': map_list
        }
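Called directly, `map_list` keeps only truthy values for the requested key (assuming `map_list` above is in scope; the items list is illustrative):

    items = [{"name": "a"}, {"name": "b"}, {"role": "c"}]
    print(list(map_list(items, "name")))      # ['a', 'b']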
import os
import numpy as np
import cv2
import torch
import torchvision
import carla

from PIL import Image, ImageDraw

from carla_project.src.image_model import ImageModel
from carla_project.src.converter import Converter

from team_code.base_agent import BaseAgent
from team_code.pid_controller import PIDController
# addition
import datetime
import pathlib

DEBUG = int(os.environ.get('HAS_DISPLAY', 0))

# addition
from carla_project.src.carla_env import draw_traffic_lights, get_nearby_lights
from carla_project.src.common import CONVERTER, COLOR
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from local_planner import LocalPlanner
import json


def get_entry_point():
    return 'ImageAgent'


def debug_display(tick_data, target_cam, out, steer, throttle, brake, desired_speed, step):
    # modification

    # rgb = np.hstack((tick_data['rgb_left'], tick_data['rgb'], tick_data['rgb_right']))

    _rgb = Image.fromarray(tick_data['rgb'])
    _draw_rgb = ImageDraw.Draw(_rgb)
    _draw_rgb.ellipse((target_cam[0]-3, target_cam[1]-3, target_cam[0]+3, target_cam[1]+3), (255, 255, 255))

    for x, y in out:
        x = (x + 1) / 2 * 256
        y = (y + 1) / 2 * 144

        _draw_rgb.ellipse((x-2, y-2, x+2, y+2), (0, 0, 255))

    _combined = Image.fromarray(np.hstack([tick_data['rgb_left'], _rgb, tick_data['rgb_right']]))

    _combined = _combined.resize((int(256 / _combined.size[1] * _combined.size[0]), 256))
    _topdown = Image.fromarray(COLOR[CONVERTER[tick_data['topdown']]])
    _topdown.thumbnail((256, 256))
    _combined = Image.fromarray(np.hstack((_combined, _topdown)))

    _draw = ImageDraw.Draw(_combined)
    _draw.text((5, 10), 'Steer: %.3f' % steer)
    _draw.text((5, 30), 'Throttle: %.3f' % throttle)
    _draw.text((5, 50), 'Brake: %s' % brake)
    _draw.text((5, 70), 'Speed: %.3f' % tick_data['speed'])
    _draw.text((5, 90), 'Desired: %.3f' % desired_speed)
    _draw.text((5, 110), 'Far Command: %s' % str(tick_data['far_command']))

    cv2.imshow('map', cv2.cvtColor(np.array(_combined), cv2.COLOR_BGR2RGB))
    cv2.waitKey(1)


class ImageAgent(BaseAgent):
    def setup(self, path_to_conf_file):
        super().setup(path_to_conf_file)

        self.converter = Converter()
        self.net = ImageModel.load_from_checkpoint(path_to_conf_file)
        self.net.cuda()
        self.net.eval()

    # addition: modified from leaderboard/team_code/auto_pilot.py
    def save(self, steer, throttle, brake, tick_data):
        # frame = self.step // 10
        frame = self.step

        pos = self._get_position(tick_data)
        far_command = tick_data['far_command']
        speed = tick_data['speed']

        center = os.path.join('rgb', ('%04d.png' % frame))
        left = os.path.join('rgb_left', ('%04d.png' % frame))
        right = os.path.join('rgb_right', ('%04d.png' % frame))
        topdown = os.path.join('topdown', ('%04d.png' % frame))
        rgb_with_car = os.path.join('rgb_with_car', ('%04d.png' % frame))

        data_row = ','.join([str(i) for i in [frame, far_command, speed, steer, throttle, brake, str(center), str(left), str(right)]])
        with (self.save_path / 'measurements.csv').open("a") as f_out:
            f_out.write(data_row + '\n')

        Image.fromarray(tick_data['rgb']).save(self.save_path / center)
        Image.fromarray(tick_data['rgb_left']).save(self.save_path / left)
        Image.fromarray(tick_data['rgb_right']).save(self.save_path / right)

        # addition
        Image.fromarray(COLOR[CONVERTER[tick_data['topdown']]]).save(self.save_path / topdown)
        Image.fromarray(tick_data['rgb_with_car']).save(self.save_path / rgb_with_car)

        ########################################################################
        # log necessary info for action-based
        if self.args.save_action_based_measurements:
            from affordances import get_driving_affordances

            self._pedestrian_forbidden_distance = 10.0
            self._pedestrian_max_detected_distance = 50.0
            self._vehicle_forbidden_distance = 10.0
            self._vehicle_max_detected_distance = 50.0
            self._tl_forbidden_distance = 10.0
            self._tl_max_detected_distance = 50.0
            self._speed_detected_distance = 10.0

            self._default_target_speed = 10
            self._angle = None

            current_affordances = get_driving_affordances(self, self._pedestrian_forbidden_distance, self._pedestrian_max_detected_distance, self._vehicle_forbidden_distance, self._vehicle_max_detected_distance, self._tl_forbidden_distance, self._tl_max_detected_distance, self._angle_rad, self._default_target_speed, self._target_speed, self._speed_detected_distance, angle=True)

            is_vehicle_hazard = current_affordances['is_vehicle_hazard']
            is_red_tl_hazard = current_affordances['is_red_tl_hazard']
            is_pedestrian_hazard = current_affordances['is_pedestrian_hazard']
            forward_speed = current_affordances['forward_speed']
            relative_angle = current_affordances['relative_angle']
            target_speed = current_affordances['target_speed']
            closest_pedestrian_distance = current_affordances['closest_pedestrian_distance']
            closest_vehicle_distance = current_affordances['closest_vehicle_distance']
            closest_red_tl_distance = current_affordances['closest_red_tl_distance']

            log_folder = str(self.save_path / 'affordance_measurements')
            if not os.path.exists(log_folder):
                os.mkdir(log_folder)
            log_path = os.path.join(log_folder, f'{self.step:06}.json')

            ego_transform = self._vehicle.get_transform()
            ego_location = ego_transform.location
            ego_rotation = ego_transform.rotation
            ego_velocity = self._vehicle.get_velocity()

            brake_noise = 0.0
            throttle_noise = 0.0  # 1.0 -> 0.0
            steer_noise = 0.0  # NaN -> 0.0

            # class RoadOption
            map_roadoption_to_action_based_roadoption = {-1: 2, 1: 3, 2: 4, 3: 5, 4: 2, 5: 2, 6: 2}

            # save info for action-based rep
            json_log_data = {
                "brake": float(brake),
                "closest_red_tl_distance": closest_red_tl_distance,
                "throttle": throttle,
                "directions": float(map_roadoption_to_action_based_roadoption[far_command.value]),
                "brake_noise": brake_noise,
                "is_red_tl_hazard": is_red_tl_hazard,
                "opponents": {},
                "closest_pedestrian_distance": closest_pedestrian_distance,
                "is_pedestrian_hazard": is_pedestrian_hazard,
                "lane": {},
                "is_vehicle_hazard": is_vehicle_hazard,
                "throttle_noise": throttle_noise,
                "ego_actor": {
                    "velocity": [
                        ego_velocity.x,
                        ego_velocity.y,
                        ego_velocity.z
                    ],
                    "position": [
                        ego_location.x,
                        ego_location.y,
                        ego_location.z
                    ],
                    "orientation": [
                        ego_rotation.roll,
                        ego_rotation.pitch,
                        ego_rotation.yaw
                    ]
                },
                "hand_brake": False,
                "steer_noise": steer_noise,
                "reverse": False,
                "relative_angle": relative_angle,
                "closest_vehicle_distance": closest_vehicle_distance,
                "walkers": {},
                "forward_speed": forward_speed,
                "steer": steer,
                "target_speed": target_speed
            }

            with open(log_path, 'w') as f_out:
                json.dump(json_log_data, f_out, indent=4)

    def _init(self):
        super()._init()

        self._turn_controller = PIDController(K_P=1.25, K_I=0.75, K_D=0.3, n=40)
        self._speed_controller = PIDController(K_P=5.0, K_I=0.5, K_D=1.0, n=40)

        # addition:
        self._vehicle = CarlaDataProvider.get_hero_actor()
        self._world = self._vehicle.get_world()

        # -------------------------------------------------------
        # add a local_planner in order to estimate relative angle
        # self.target_speed = 10
        # args_lateral_dict = {
        #     'K_P': 1,
        #     'K_D': 0.4,
        #     'K_I': 0,
        #     'dt': 1.0/10.0}
        # self._local_planner = LocalPlanner(
        #     self._vehicle, opt_dict={'target_speed' : self.target_speed,
        #     'lateral_control_dict':args_lateral_dict})
        # self._hop_resolution = 2.0
        # self._path_seperation_hop = 2
        # self._path_seperation_threshold = 0.5
        # self._grp = None
        #
        # self._map = CarlaDataProvider.get_map()
        # route = [(self._map.get_waypoint(x[0].location), x[1]) for x in self._global_plan_world_coord]
        #
        # self._local_planner.set_global_plan(route)

    def tick(self, input_data):
        result = super().tick(input_data)
        result['image'] = np.concatenate(tuple(result[x] for x in ['rgb', 'rgb_left', 'rgb_right']), -1)

        rgb_with_car = cv2.cvtColor(input_data['rgb_with_car'][1][:, :, :3], cv2.COLOR_BGR2RGB)
        result['rgb_with_car'] = rgb_with_car

        result['radar_central'] = input_data['radar_central']

        theta = result['compass']
        theta = 0.0 if np.isnan(theta) else theta
        theta = theta + np.pi / 2
        R = np.array([
            [np.cos(theta), -np.sin(theta)],
            [np.sin(theta), np.cos(theta)],
        ])

        gps = self._get_position(result)
        # modification
        far_node, far_command = self._command_planner.run_step(gps)
        target = R.T.dot(far_node - gps)
        target *= 5.5
        target += [128, 256]
        target = np.clip(target, 0, 256)

        result['target'] = target
        # addition:
        self._actors = self._world.get_actors()
        self._traffic_lights = get_nearby_lights(self._vehicle, self._actors.filter('*traffic_light*'))
        result['far_command'] = far_command
        topdown = input_data['map'][1][:, :, 2]
        topdown = draw_traffic_lights(topdown, self._vehicle, self._traffic_lights)
        result['topdown'] = topdown

        return result

    @torch.no_grad()
    def run_step_using_learned_controller(self, input_data, timestamp):
        if not self.initialized:
            self._init()

        tick_data = self.tick(input_data)

        img = torchvision.transforms.functional.to_tensor(tick_data['image'])
        img = img[None].cuda()

        target = torch.from_numpy(tick_data['target'])
        target = target[None].cuda()

        import random
        torch.manual_seed(2)
        torch.cuda.manual_seed_all(2)
        torch.backends.cudnn.deterministic = True
        np.random.seed(1)
        random.seed(1)
        device = torch.device('cuda')
        torch.backends.cudnn.benchmark = False

        points, (target_cam, _) = self.net.forward(img, target)
        control = self.net.controller(points).cpu().squeeze()

        steer = control[0].item()
        desired_speed = control[1].item()
        speed = tick_data['speed']

        brake = desired_speed < 0.4 or (speed / desired_speed) > 1.1

        delta = np.clip(desired_speed - speed, 0.0, 0.25)
        throttle = self._speed_controller.step(delta)
        throttle = np.clip(throttle, 0.0, 0.75)
        throttle = throttle if not brake else 0.0

        control = carla.VehicleControl()
        control.steer = steer
        control.throttle = throttle
        control.brake = float(brake)

        if DEBUG:
            debug_display(
                tick_data, target_cam.squeeze(), points.cpu().squeeze(),
                steer, throttle, brake, desired_speed,
                self.step)

        return control

    @torch.no_grad()
    def run_step(self, input_data, timestamp):
        if not self.initialized:
            self._init()

        tick_data = self.tick(input_data)
        radar_data = tick_data['radar_central'][1]

        img = torchvision.transforms.functional.to_tensor(tick_data['image'])
        img = img[None].cuda()

        target = torch.from_numpy(tick_data['target'])
        target = target[None].cuda()

        points, (target_cam, _) = self.net.forward(img, target)
        points_cam = points.clone().cpu()

        if self.step == 0:
            print('\n' * 5)
            print('step :', self.step)
            # print('radar')
            # print(radar_data.shape)
            # print(radar_data)
            # print(np.max(radar_data, axis=0))
            print('image', np.sum(tick_data['image']))
            # print('img', torch.sum(img))
            # print('target', target)
            # print('points', points)
            print('\n' * 5)

        points_cam[..., 0] = (points_cam[..., 0] + 1) / 2 * img.shape[-1]
        points_cam[..., 1] = (points_cam[..., 1] + 1) / 2 * img.shape[-2]
        points_cam = points_cam.squeeze()
        points_world = self.converter.cam_to_world(points_cam).numpy()

        aim = (points_world[1] + points_world[0]) / 2.0
        angle = np.degrees(np.pi / 2 - np.arctan2(aim[1], aim[0]))
        self._angle_rad = np.radians(angle)
        angle = angle / 90
        steer = self._turn_controller.step(angle)
        steer = np.clip(steer, -1.0, 1.0)

        desired_speed = np.linalg.norm(points_world[0] - points_world[1]) * 2.0
        # desired_speed *= (1 - abs(angle)) ** 2
        self._target_speed = desired_speed
        speed = tick_data['speed']

        brake = desired_speed < 0.4 or (speed / desired_speed) > 1.1

        delta = np.clip(desired_speed - speed, 0.0, 0.25)
        throttle = self._speed_controller.step(delta)
        throttle = np.clip(throttle, 0.0, 0.75)
        throttle = throttle if not brake else 0.0

        control = carla.VehicleControl()
        control.steer = steer
        control.throttle = throttle
        control.brake = float(brake)

        if DEBUG:
            debug_display(tick_data, target_cam.squeeze(), points.cpu().squeeze(), steer, throttle, brake, desired_speed, self.step)

        # addition: from leaderboard/team_code/auto_pilot.py
        if self.step == 0:
            title_row = ','.join(['frame_id', 'far_command', 'speed', 'steering', 'throttle', 'brake', 'center', 'left', 'right'])
            with (self.save_path / 'measurements.csv').open("a") as f_out:
                f_out.write(title_row + '\n')
        if self.step % 1 == 0:
            self.gather_info((steer, throttle, float(brake), speed))

        record_every_n_steps = self.record_every_n_step
        if self.step % record_every_n_steps == 0:
            self.save(steer, throttle, brake, tick_data)
        return control
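The target-projection math in `tick()` above, isolated with illustrative numbers: the world-frame vector from the GPS fix to the far node is rotated into the ego frame, then scaled and offset into the 256x256 map frame:

    import numpy as np

    theta = 0.3 + np.pi / 2                   # compass heading plus the 90-degree offset
    R = np.array([[np.cos(theta), -np.sin(theta)],
                  [np.sin(theta),  np.cos(theta)]])
    far_node = np.array([110.0, 42.0])        # illustrative world coordinates
    gps = np.array([108.0, 41.0])
    target = np.clip(R.T.dot(far_node - gps) * 5.5 + [128, 256], 0, 256)
    print(target)                             # pixel coordinates fed to the network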
# Author: Steven J. Bethard <[email protected]>.NEWLINENEWLINE"""Command-line parsing libraryNEWLINENEWLINEThis module is an optparse-inspired command-line parsing library that:NEWLINENEWLINE - handles both optional and positional argumentsNEWLINE - produces highly informative usage messagesNEWLINE - supports parsers that dispatch to sub-parsersNEWLINENEWLINEThe following is a simple usage example that sums integers from theNEWLINEcommand-line and writes the result to a file::NEWLINENEWLINE parser = argparse.ArgumentParser(NEWLINE description='sum the integers at the command line')NEWLINE parser.add_argument(NEWLINE 'integers', metavar='int', nargs='+', type=int,NEWLINE help='an integer to be summed')NEWLINE parser.add_argument(NEWLINE '--log', default=sys.stdout, type=argparse.FileType('w'),NEWLINE help='the file where the sum should be written')NEWLINE args = parser.parse_args()NEWLINE args.log.write('%s' % sum(args.integers))NEWLINE args.log.close()NEWLINENEWLINEThe module contains the following public classes:NEWLINENEWLINE - ArgumentParser -- The main entry point for command-line parsing. As theNEWLINE example above shows, the add_argument() method is used to populateNEWLINE the parser with actions for optional and positional arguments. ThenNEWLINE the parse_args() method is invoked to convert the args at theNEWLINE command-line into an object with attributes.NEWLINENEWLINE - ArgumentError -- The exception raised by ArgumentParser objects whenNEWLINE there are errors with the parser's actions. Errors raised whileNEWLINE parsing the command-line are caught by ArgumentParser and emittedNEWLINE as command-line messages.NEWLINENEWLINE - FileType -- A factory for defining types of files to be created. As theNEWLINE example above shows, instances of FileType are typically passed asNEWLINE the type= argument of add_argument() calls.NEWLINENEWLINE - Action -- The base class for parser actions. Typically actions areNEWLINE selected by passing strings like 'store_true' or 'append_const' toNEWLINE the action= argument of add_argument(). However, for greaterNEWLINE customization of ArgumentParser actions, subclasses of Action mayNEWLINE be defined and passed as the action= argument.NEWLINENEWLINE - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,NEWLINE ArgumentDefaultsHelpFormatter -- Formatter classes whichNEWLINE may be passed as the formatter_class= argument to theNEWLINE ArgumentParser constructor. 
HelpFormatter is the default,NEWLINE RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parserNEWLINE not to change the formatting for help text, andNEWLINE ArgumentDefaultsHelpFormatter adds information about argument defaultsNEWLINE to the help.NEWLINENEWLINEAll other classes in this module are considered implementation details.NEWLINE(Also note that HelpFormatter and RawDescriptionHelpFormatter are onlyNEWLINEconsidered public as object names -- the API of the formatter objects isNEWLINEstill considered an implementation detail.)NEWLINE"""NEWLINENEWLINE__version__ = '1.1'NEWLINE__all__ = [NEWLINE 'ArgumentParser',NEWLINE 'ArgumentError',NEWLINE 'ArgumentTypeError',NEWLINE 'FileType',NEWLINE 'HelpFormatter',NEWLINE 'ArgumentDefaultsHelpFormatter',NEWLINE 'RawDescriptionHelpFormatter',NEWLINE 'RawTextHelpFormatter',NEWLINE 'Namespace',NEWLINE 'Action',NEWLINE 'ONE_OR_MORE',NEWLINE 'OPTIONAL',NEWLINE 'PARSER',NEWLINE 'REMAINDER',NEWLINE 'SUPPRESS',NEWLINE 'ZERO_OR_MORE',NEWLINE]NEWLINENEWLINENEWLINEimport collections as _collectionsNEWLINEimport copy as _copyNEWLINEimport os as _osNEWLINEimport re as _reNEWLINEimport sys as _sysNEWLINEimport textwrap as _textwrapNEWLINENEWLINEfrom gettext import gettext as _NEWLINENEWLINENEWLINEdef _callable(obj):NEWLINE return hasattr(obj, '__call__') or hasattr(obj, '__bases__')NEWLINENEWLINENEWLINESUPPRESS = '==SUPPRESS=='NEWLINENEWLINEOPTIONAL = '?'NEWLINEZERO_OR_MORE = '*'NEWLINEONE_OR_MORE = '+'NEWLINEPARSER = 'A...'NEWLINEREMAINDER = '...'NEWLINE_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'NEWLINENEWLINE# =============================NEWLINE# Utility functions and classesNEWLINE# =============================NEWLINENEWLINEclass _AttributeHolder(object):NEWLINE """Abstract base class that provides __repr__.NEWLINENEWLINE The __repr__ method returns a string in the format::NEWLINE ClassName(attr=name, attr=name, ...)NEWLINE The attributes are determined either by a class-level attribute,NEWLINE '_kwarg_names', or by inspecting the instance __dict__.NEWLINE """NEWLINENEWLINE def __repr__(self):NEWLINE type_name = type(self).__name__NEWLINE arg_strings = []NEWLINE for arg in self._get_args():NEWLINE arg_strings.append(repr(arg))NEWLINE for name, value in self._get_kwargs():NEWLINE arg_strings.append('%s=%r' % (name, value))NEWLINE return '%s(%s)' % (type_name, ', '.join(arg_strings))NEWLINENEWLINE def _get_kwargs(self):NEWLINE return sorted(self.__dict__.items())NEWLINENEWLINE def _get_args(self):NEWLINE return []NEWLINENEWLINENEWLINEdef _ensure_value(namespace, name, value):NEWLINE if getattr(namespace, name, None) is None:NEWLINE setattr(namespace, name, value)NEWLINE return getattr(namespace, name)NEWLINENEWLINENEWLINE# ===============NEWLINE# Formatting HelpNEWLINE# ===============NEWLINENEWLINEclass HelpFormatter(object):NEWLINE """Formatter for generating usage messages and argument help strings.NEWLINENEWLINE Only the name of this class is considered a public API. 
All the methodsNEWLINE provided by the class are considered an implementation detail.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE prog,NEWLINE indent_increment=2,NEWLINE max_help_position=24,NEWLINE width=None):NEWLINENEWLINE # default setting for widthNEWLINE if width is None:NEWLINE try:NEWLINE width = int(_os.environ['COLUMNS'])NEWLINE except (KeyError, ValueError):NEWLINE width = 80NEWLINE width -= 2NEWLINENEWLINE self._prog = progNEWLINE self._indent_increment = indent_incrementNEWLINE self._max_help_position = max_help_positionNEWLINE self._max_help_position = min(max_help_position,NEWLINE max(width - 20, indent_increment * 2))NEWLINE self._width = widthNEWLINENEWLINE self._current_indent = 0NEWLINE self._level = 0NEWLINE self._action_max_length = 0NEWLINENEWLINE self._root_section = self._Section(self, None)NEWLINE self._current_section = self._root_sectionNEWLINENEWLINE self._whitespace_matcher = _re.compile(r'\s+')NEWLINE self._long_break_matcher = _re.compile(r'\n\n\n+')NEWLINENEWLINE # ===============================NEWLINE # Section and indentation methodsNEWLINE # ===============================NEWLINE def _indent(self):NEWLINE self._current_indent += self._indent_incrementNEWLINE self._level += 1NEWLINENEWLINE def _dedent(self):NEWLINE self._current_indent -= self._indent_incrementNEWLINE assert self._current_indent >= 0, 'Indent decreased below 0.'NEWLINE self._level -= 1NEWLINENEWLINE class _Section(object):NEWLINENEWLINE def __init__(self, formatter, parent, heading=None):NEWLINE self.formatter = formatterNEWLINE self.parent = parentNEWLINE self.heading = headingNEWLINE self.items = []NEWLINENEWLINE def format_help(self):NEWLINE # format the indented sectionNEWLINE if self.parent is not None:NEWLINE self.formatter._indent()NEWLINE join = self.formatter._join_partsNEWLINE for func, args in self.items:NEWLINE func(*args)NEWLINE item_help = join([func(*args) for func, args in self.items])NEWLINE if self.parent is not None:NEWLINE self.formatter._dedent()NEWLINENEWLINE # return nothing if the section was emptyNEWLINE if not item_help:NEWLINE return ''NEWLINENEWLINE # add the heading if the section was non-emptyNEWLINE if self.heading is not SUPPRESS and self.heading is not None:NEWLINE current_indent = self.formatter._current_indentNEWLINE heading = '%*s%s:\n' % (current_indent, '', self.heading)NEWLINE else:NEWLINE heading = ''NEWLINENEWLINE # join the section-initial newline, the heading and the helpNEWLINE return join(['\n', heading, item_help, '\n'])NEWLINENEWLINE def _add_item(self, func, args):NEWLINE self._current_section.items.append((func, args))NEWLINENEWLINE # ========================NEWLINE # Message building methodsNEWLINE # ========================NEWLINE def start_section(self, heading):NEWLINE self._indent()NEWLINE section = self._Section(self, self._current_section, heading)NEWLINE self._add_item(section.format_help, [])NEWLINE self._current_section = sectionNEWLINENEWLINE def end_section(self):NEWLINE self._current_section = self._current_section.parentNEWLINE self._dedent()NEWLINENEWLINE def add_text(self, text):NEWLINE if text is not SUPPRESS and text is not None:NEWLINE self._add_item(self._format_text, [text])NEWLINENEWLINE def add_usage(self, usage, actions, groups, prefix=None):NEWLINE if usage is not SUPPRESS:NEWLINE args = usage, actions, groups, prefixNEWLINE self._add_item(self._format_usage, args)NEWLINENEWLINE def add_argument(self, action):NEWLINE if action.help is not SUPPRESS:NEWLINENEWLINE # find all invocationsNEWLINE get_invocation = 
self._format_action_invocationNEWLINE invocations = [get_invocation(action)]NEWLINE for subaction in self._iter_indented_subactions(action):NEWLINE invocations.append(get_invocation(subaction))NEWLINENEWLINE # update the maximum item lengthNEWLINE invocation_length = max([len(s) for s in invocations])NEWLINE action_length = invocation_length + self._current_indentNEWLINE self._action_max_length = max(self._action_max_length,NEWLINE action_length)NEWLINENEWLINE # add the item to the listNEWLINE self._add_item(self._format_action, [action])NEWLINENEWLINE def add_arguments(self, actions):NEWLINE for action in actions:NEWLINE self.add_argument(action)NEWLINENEWLINE # =======================NEWLINE # Help-formatting methodsNEWLINE # =======================NEWLINE def format_help(self):NEWLINE help = self._root_section.format_help()NEWLINE if help:NEWLINE help = self._long_break_matcher.sub('\n\n', help)NEWLINE help = help.strip('\n') + '\n'NEWLINE return helpNEWLINENEWLINE def _join_parts(self, part_strings):NEWLINE return ''.join([partNEWLINE for part in part_stringsNEWLINE if part and part is not SUPPRESS])NEWLINENEWLINE def _format_usage(self, usage, actions, groups, prefix):NEWLINE if prefix is None:NEWLINE prefix = _('usage: ')NEWLINENEWLINE # if usage is specified, use thatNEWLINE if usage is not None:NEWLINE usage = usage % dict(prog=self._prog)NEWLINENEWLINE # if no optionals or positionals are available, usage is just progNEWLINE elif usage is None and not actions:NEWLINE usage = '%(prog)s' % dict(prog=self._prog)NEWLINENEWLINE # if optionals and positionals are available, calculate usageNEWLINE elif usage is None:NEWLINE prog = '%(prog)s' % dict(prog=self._prog)NEWLINENEWLINE # split optionals from positionalsNEWLINE optionals = []NEWLINE positionals = []NEWLINE for action in actions:NEWLINE if action.option_strings:NEWLINE optionals.append(action)NEWLINE else:NEWLINE positionals.append(action)NEWLINENEWLINE # build full usage stringNEWLINE format = self._format_actions_usageNEWLINE action_usage = format(optionals + positionals, groups)NEWLINE usage = ' '.join([s for s in [prog, action_usage] if s])NEWLINENEWLINE # wrap the usage parts if it's too longNEWLINE text_width = self._width - self._current_indentNEWLINE if len(prefix) + len(usage) > text_width:NEWLINENEWLINE # break usage into wrappable partsNEWLINE part_regexp = (NEWLINE r'\(.*?\)+(?=\s|$)|'NEWLINE r'\[.*?\]+(?=\s|$)|'NEWLINE r'\S+'NEWLINE )NEWLINE opt_usage = format(optionals, groups)NEWLINE pos_usage = format(positionals, groups)NEWLINE opt_parts = _re.findall(part_regexp, opt_usage)NEWLINE pos_parts = _re.findall(part_regexp, pos_usage)NEWLINE assert ' '.join(opt_parts) == opt_usageNEWLINE assert ' '.join(pos_parts) == pos_usageNEWLINENEWLINE # helper for wrapping linesNEWLINE def get_lines(parts, indent, prefix=None):NEWLINE lines = []NEWLINE line = []NEWLINE if prefix is not None:NEWLINE line_len = len(prefix) - 1NEWLINE else:NEWLINE line_len = len(indent) - 1NEWLINE for part in parts:NEWLINE if line_len + 1 + len(part) > text_width and line:NEWLINE lines.append(indent + ' '.join(line))NEWLINE line = []NEWLINE line_len = len(indent) - 1NEWLINE line.append(part)NEWLINE line_len += len(part) + 1NEWLINE if line:NEWLINE lines.append(indent + ' '.join(line))NEWLINE if prefix is not None:NEWLINE lines[0] = lines[0][len(indent):]NEWLINE return linesNEWLINENEWLINE # if prog is short, follow it with optionals or positionalsNEWLINE if len(prefix) + len(prog) <= 0.75 * text_width:NEWLINE indent = ' ' * (len(prefix) + len(prog) + 
1)NEWLINE if opt_parts:NEWLINE lines = get_lines([prog] + opt_parts, indent, prefix)NEWLINE lines.extend(get_lines(pos_parts, indent))NEWLINE elif pos_parts:NEWLINE lines = get_lines([prog] + pos_parts, indent, prefix)NEWLINE else:NEWLINE lines = [prog]NEWLINENEWLINE # if prog is long, put it on its own lineNEWLINE else:NEWLINE indent = ' ' * len(prefix)NEWLINE parts = opt_parts + pos_partsNEWLINE lines = get_lines(parts, indent)NEWLINE if len(lines) > 1:NEWLINE lines = []NEWLINE lines.extend(get_lines(opt_parts, indent))NEWLINE lines.extend(get_lines(pos_parts, indent))NEWLINE lines = [prog] + linesNEWLINENEWLINE # join lines into usageNEWLINE usage = '\n'.join(lines)NEWLINENEWLINE # prefix with 'usage:'NEWLINE return '%s%s\n\n' % (prefix, usage)NEWLINENEWLINE def _format_actions_usage(self, actions, groups):NEWLINE # find group indices and identify actions in groupsNEWLINE group_actions = set()NEWLINE inserts = {}NEWLINE for group in groups:NEWLINE try:NEWLINE start = actions.index(group._group_actions[0])NEWLINE except ValueError:NEWLINE continueNEWLINE else:NEWLINE end = start + len(group._group_actions)NEWLINE if actions[start:end] == group._group_actions:NEWLINE for action in group._group_actions:NEWLINE group_actions.add(action)NEWLINE if not group.required:NEWLINE if start in inserts:NEWLINE inserts[start] += ' ['NEWLINE else:NEWLINE inserts[start] = '['NEWLINE inserts[end] = ']'NEWLINE else:NEWLINE if start in inserts:NEWLINE inserts[start] += ' ('NEWLINE else:NEWLINE inserts[start] = '('NEWLINE inserts[end] = ')'NEWLINE for i in range(start + 1, end):NEWLINE inserts[i] = '|'NEWLINENEWLINE # collect all actions format stringsNEWLINE parts = []NEWLINE for i, action in enumerate(actions):NEWLINENEWLINE # suppressed arguments are marked with NoneNEWLINE # remove | separators for suppressed argumentsNEWLINE if action.help is SUPPRESS:NEWLINE parts.append(None)NEWLINE if inserts.get(i) == '|':NEWLINE inserts.pop(i)NEWLINE elif inserts.get(i + 1) == '|':NEWLINE inserts.pop(i + 1)NEWLINENEWLINE # produce all arg stringsNEWLINE elif not action.option_strings:NEWLINE part = self._format_args(action, action.dest)NEWLINENEWLINE # if it's in a group, strip the outer []NEWLINE if action in group_actions:NEWLINE if part[0] == '[' and part[-1] == ']':NEWLINE part = part[1:-1]NEWLINENEWLINE # add the action string to the listNEWLINE parts.append(part)NEWLINENEWLINE # produce the first way to invoke the option in bracketsNEWLINE else:NEWLINE option_string = action.option_strings[0]NEWLINENEWLINE # if the Optional doesn't take a value, format is:NEWLINE # -s or --longNEWLINE if action.nargs == 0:NEWLINE part = '%s' % option_stringNEWLINENEWLINE # if the Optional takes a value, format is:NEWLINE # -s ARGS or --long ARGSNEWLINE else:NEWLINE default = action.dest.upper()NEWLINE args_string = self._format_args(action, default)NEWLINE part = '%s %s' % (option_string, args_string)NEWLINENEWLINE # make it look optional if it's not required or in a groupNEWLINE if not action.required and action not in group_actions:NEWLINE part = '[%s]' % partNEWLINENEWLINE # add the action string to the listNEWLINE parts.append(part)NEWLINENEWLINE # insert things at the necessary indicesNEWLINE for i in sorted(inserts, reverse=True):NEWLINE parts[i:i] = [inserts[i]]NEWLINENEWLINE # join all the action items with spacesNEWLINE text = ' '.join([item for item in parts if item is not None])NEWLINENEWLINE # clean up separators for mutually exclusive groupsNEWLINE open = r'[\[(]'NEWLINE close = r'[\])]'NEWLINE text = 
_re.sub(r'(%s) ' % open, r'\1', text)NEWLINE text = _re.sub(r' (%s)' % close, r'\1', text)NEWLINE text = _re.sub(r'%s *%s' % (open, close), r'', text)NEWLINE text = _re.sub(r'\(([^|]*)\)', r'\1', text)NEWLINE text = text.strip()NEWLINENEWLINE # return the textNEWLINE return textNEWLINENEWLINE def _format_text(self, text):NEWLINE if '%(prog)' in text:NEWLINE text = text % dict(prog=self._prog)NEWLINE text_width = max(self._width - self._current_indent, 11)NEWLINE indent = ' ' * self._current_indentNEWLINE return self._fill_text(text, text_width, indent) + '\n\n'NEWLINENEWLINE def _format_action(self, action):NEWLINE # determine the required width and the entry labelNEWLINE help_position = min(self._action_max_length + 2,NEWLINE self._max_help_position)NEWLINE help_width = max(self._width - help_position, 11)NEWLINE action_width = help_position - self._current_indent - 2NEWLINE action_header = self._format_action_invocation(action)NEWLINENEWLINE # no help; start on same line and add a final newlineNEWLINE if not action.help:NEWLINE tup = self._current_indent, '', action_headerNEWLINE action_header = '%*s%s\n' % tupNEWLINENEWLINE # short action name; start on the same line and pad two spacesNEWLINE elif len(action_header) <= action_width:NEWLINE tup = self._current_indent, '', action_width, action_headerNEWLINE action_header = '%*s%-*s ' % tupNEWLINE indent_first = 0NEWLINENEWLINE # long action name; start on the next lineNEWLINE else:NEWLINE tup = self._current_indent, '', action_headerNEWLINE action_header = '%*s%s\n' % tupNEWLINE indent_first = help_positionNEWLINENEWLINE # collect the pieces of the action helpNEWLINE parts = [action_header]NEWLINENEWLINE # if there was help for the action, add lines of help textNEWLINE if action.help:NEWLINE help_text = self._expand_help(action)NEWLINE help_lines = self._split_lines(help_text, help_width)NEWLINE parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))NEWLINE for line in help_lines[1:]:NEWLINE parts.append('%*s%s\n' % (help_position, '', line))NEWLINENEWLINE # or add a newline if the description doesn't end with oneNEWLINE elif not action_header.endswith('\n'):NEWLINE parts.append('\n')NEWLINENEWLINE # if there are any sub-actions, add their help as wellNEWLINE for subaction in self._iter_indented_subactions(action):NEWLINE parts.append(self._format_action(subaction))NEWLINENEWLINE # return a single stringNEWLINE return self._join_parts(parts)NEWLINENEWLINE def _format_action_invocation(self, action):NEWLINE if not action.option_strings:NEWLINE metavar, = self._metavar_formatter(action, action.dest)(1)NEWLINE return metavarNEWLINENEWLINE else:NEWLINE parts = []NEWLINENEWLINE # if the Optional doesn't take a value, format is:NEWLINE # -s, --longNEWLINE if action.nargs == 0:NEWLINE parts.extend(action.option_strings)NEWLINENEWLINE # if the Optional takes a value, format is:NEWLINE # -s ARGS, --long ARGSNEWLINE else:NEWLINE default = action.dest.upper()NEWLINE args_string = self._format_args(action, default)NEWLINE for option_string in action.option_strings:NEWLINE parts.append('%s %s' % (option_string, args_string))NEWLINENEWLINE return ', '.join(parts)NEWLINENEWLINE def _metavar_formatter(self, action, default_metavar):NEWLINE if action.metavar is not None:NEWLINE result = action.metavarNEWLINE elif action.choices is not None:NEWLINE choice_strs = [str(choice) for choice in action.choices]NEWLINE result = '{%s}' % ','.join(choice_strs)NEWLINE else:NEWLINE result = default_metavarNEWLINENEWLINE def format(tuple_size):NEWLINE if 
isinstance(result, tuple):NEWLINE return resultNEWLINE else:NEWLINE return (result, ) * tuple_sizeNEWLINE return formatNEWLINENEWLINE def _format_args(self, action, default_metavar):NEWLINE get_metavar = self._metavar_formatter(action, default_metavar)NEWLINE if action.nargs is None:NEWLINE result = '%s' % get_metavar(1)NEWLINE elif action.nargs == OPTIONAL:NEWLINE result = '[%s]' % get_metavar(1)NEWLINE elif action.nargs == ZERO_OR_MORE:NEWLINE result = '[%s [%s ...]]' % get_metavar(2)NEWLINE elif action.nargs == ONE_OR_MORE:NEWLINE result = '%s [%s ...]' % get_metavar(2)NEWLINE elif action.nargs == REMAINDER:NEWLINE result = '...'NEWLINE elif action.nargs == PARSER:NEWLINE result = '%s ...' % get_metavar(1)NEWLINE else:NEWLINE formats = ['%s' for _ in range(action.nargs)]NEWLINE result = ' '.join(formats) % get_metavar(action.nargs)NEWLINE return resultNEWLINENEWLINE def _expand_help(self, action):NEWLINE params = dict(vars(action), prog=self._prog)NEWLINE for name in list(params):NEWLINE if params[name] is SUPPRESS:NEWLINE del params[name]NEWLINE for name in list(params):NEWLINE if hasattr(params[name], '__name__'):NEWLINE params[name] = params[name].__name__NEWLINE if params.get('choices') is not None:NEWLINE choices_str = ', '.join([str(c) for c in params['choices']])NEWLINE params['choices'] = choices_strNEWLINE return self._get_help_string(action) % paramsNEWLINENEWLINE def _iter_indented_subactions(self, action):NEWLINE try:NEWLINE get_subactions = action._get_subactionsNEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE self._indent()NEWLINE for subaction in get_subactions():NEWLINE yield subactionNEWLINE self._dedent()NEWLINENEWLINE def _split_lines(self, text, width):NEWLINE text = self._whitespace_matcher.sub(' ', text).strip()NEWLINE return _textwrap.wrap(text, width)NEWLINENEWLINE def _fill_text(self, text, width, indent):NEWLINE text = self._whitespace_matcher.sub(' ', text).strip()NEWLINE return _textwrap.fill(text, width, initial_indent=indent,NEWLINE subsequent_indent=indent)NEWLINENEWLINE def _get_help_string(self, action):NEWLINE return action.helpNEWLINENEWLINENEWLINEclass RawDescriptionHelpFormatter(HelpFormatter):NEWLINE """Help message formatter which retains any formatting in descriptions.NEWLINENEWLINE Only the name of this class is considered a public API. All the methodsNEWLINE provided by the class are considered an implementation detail.NEWLINE """NEWLINENEWLINE def _fill_text(self, text, width, indent):NEWLINE return ''.join([indent + line for line in text.splitlines(True)])NEWLINENEWLINENEWLINEclass RawTextHelpFormatter(RawDescriptionHelpFormatter):NEWLINE """Help message formatter which retains formatting of all help text.NEWLINENEWLINE Only the name of this class is considered a public API. All the methodsNEWLINE provided by the class are considered an implementation detail.NEWLINE """NEWLINENEWLINE def _split_lines(self, text, width):NEWLINE return text.splitlines()NEWLINENEWLINENEWLINEclass ArgumentDefaultsHelpFormatter(HelpFormatter):NEWLINE """Help message formatter which adds default values to argument help.NEWLINENEWLINE Only the name of this class is considered a public API. 
All the methodsNEWLINE provided by the class are considered an implementation detail.NEWLINE """NEWLINENEWLINE def _get_help_string(self, action):NEWLINE help = action.helpNEWLINE if '%(default)' not in action.help:NEWLINE if action.default is not SUPPRESS:NEWLINE defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]NEWLINE if action.option_strings or action.nargs in defaulting_nargs:NEWLINE help += ' (default: %(default)s)'NEWLINE return helpNEWLINENEWLINENEWLINE# =====================NEWLINE# Options and ArgumentsNEWLINE# =====================NEWLINENEWLINEdef _get_action_name(argument):NEWLINE if argument is None:NEWLINE return NoneNEWLINE elif argument.option_strings:NEWLINE return '/'.join(argument.option_strings)NEWLINE elif argument.metavar not in (None, SUPPRESS):NEWLINE return argument.metavarNEWLINE elif argument.dest not in (None, SUPPRESS):NEWLINE return argument.destNEWLINE else:NEWLINE return NoneNEWLINENEWLINENEWLINEclass ArgumentError(Exception):NEWLINE """An error from creating or using an argument (optional or positional).NEWLINENEWLINE The string value of this exception is the message, augmented withNEWLINE information about the argument that caused it.NEWLINE """NEWLINENEWLINE def __init__(self, argument, message):NEWLINE self.argument_name = _get_action_name(argument)NEWLINE self.message = messageNEWLINENEWLINE def __str__(self):NEWLINE if self.argument_name is None:NEWLINE format = '%(message)s'NEWLINE else:NEWLINE format = 'argument %(argument_name)s: %(message)s'NEWLINE return format % dict(message=self.message,NEWLINE argument_name=self.argument_name)NEWLINENEWLINENEWLINEclass ArgumentTypeError(Exception):NEWLINE """An error from trying to convert a command line string to a type."""NEWLINE passNEWLINENEWLINENEWLINE# ==============NEWLINE# Action classesNEWLINE# ==============NEWLINENEWLINEclass Action(_AttributeHolder):NEWLINE """Information about how to convert command line strings to Python objects.NEWLINENEWLINE Action objects are used by an ArgumentParser to represent the informationNEWLINE needed to parse a single argument from one or more strings from theNEWLINE command line. The keyword arguments to the Action constructor are alsoNEWLINE all attributes of Action instances.NEWLINENEWLINE Keyword Arguments:NEWLINENEWLINE - option_strings -- A list of command-line option strings whichNEWLINE should be associated with this action.NEWLINENEWLINE - dest -- The name of the attribute to hold the created object(s)NEWLINENEWLINE - nargs -- The number of command-line arguments that should beNEWLINE consumed. By default, one argument will be consumed and a singleNEWLINE value will be produced. Other values include:NEWLINE - N (an integer) consumes N arguments (and produces a list)NEWLINE - '?' consumes zero or one argumentsNEWLINE - '*' consumes zero or more arguments (and produces a list)NEWLINE - '+' consumes one or more arguments (and produces a list)NEWLINE Note that the difference between the default and nargs=1 is thatNEWLINE with the default, a single value will be produced, while withNEWLINE nargs=1, a list containing a single value will be produced.NEWLINENEWLINE - const -- The value to be produced if the option is specified and theNEWLINE option uses an action that takes no values.NEWLINENEWLINE - default -- The value to be produced if the option is not specified.NEWLINENEWLINE - type -- A callable that accepts a single string argument, andNEWLINE returns the converted value. 
The standard Python types str, int,NEWLINE float, and complex are useful examples of such callables. If None,NEWLINE str is used.NEWLINENEWLINE - choices -- A container of values that should be allowed. If not None,NEWLINE after a command-line argument has been converted to the appropriateNEWLINE type, an exception will be raised if it is not a member of thisNEWLINE collection.NEWLINENEWLINE - required -- True if the action must always be specified at theNEWLINE command line. This is only meaningful for optional command-lineNEWLINE arguments.NEWLINENEWLINE - help -- The help string describing the argument.NEWLINENEWLINE - metavar -- The name to be used for the option's argument with theNEWLINE help string. If None, the 'dest' value will be used as the name.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE nargs=None,NEWLINE const=None,NEWLINE default=None,NEWLINE type=None,NEWLINE choices=None,NEWLINE required=False,NEWLINE help=None,NEWLINE metavar=None):NEWLINE self.option_strings = option_stringsNEWLINE self.dest = destNEWLINE self.nargs = nargsNEWLINE self.const = constNEWLINE self.default = defaultNEWLINE self.type = typeNEWLINE self.choices = choicesNEWLINE self.required = requiredNEWLINE self.help = helpNEWLINE self.metavar = metavarNEWLINENEWLINE def _get_kwargs(self):NEWLINE names = [NEWLINE 'option_strings',NEWLINE 'dest',NEWLINE 'nargs',NEWLINE 'const',NEWLINE 'default',NEWLINE 'type',NEWLINE 'choices',NEWLINE 'help',NEWLINE 'metavar',NEWLINE ]NEWLINE return [(name, getattr(self, name)) for name in names]NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE raise NotImplementedError(_('.__call__() not defined'))NEWLINENEWLINENEWLINEclass _StoreAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE nargs=None,NEWLINE const=None,NEWLINE default=None,NEWLINE type=None,NEWLINE choices=None,NEWLINE required=False,NEWLINE help=None,NEWLINE metavar=None):NEWLINE if nargs == 0:NEWLINE raise ValueError('nargs for store actions must be > 0; if you 'NEWLINE 'have nothing to store, actions such as store 'NEWLINE 'true or store const may be more appropriate')NEWLINE if const is not None and nargs != OPTIONAL:NEWLINE raise ValueError('nargs must be %r to supply const' % OPTIONAL)NEWLINE super(_StoreAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=nargs,NEWLINE const=const,NEWLINE default=default,NEWLINE type=type,NEWLINE choices=choices,NEWLINE required=required,NEWLINE help=help,NEWLINE metavar=metavar)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE setattr(namespace, self.dest, values)NEWLINENEWLINENEWLINEclass _StoreConstAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE const,NEWLINE default=None,NEWLINE required=False,NEWLINE help=None,NEWLINE metavar=None):NEWLINE super(_StoreConstAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=0,NEWLINE const=const,NEWLINE default=default,NEWLINE required=required,NEWLINE help=help)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE setattr(namespace, self.dest, self.const)NEWLINENEWLINENEWLINEclass _StoreTrueAction(_StoreConstAction):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE default=False,NEWLINE required=False,NEWLINE help=None):NEWLINE super(_StoreTrueAction, self).__init__(NEWLINE 
option_strings=option_strings,NEWLINE dest=dest,NEWLINE const=True,NEWLINE default=default,NEWLINE required=required,NEWLINE help=help)NEWLINENEWLINENEWLINEclass _StoreFalseAction(_StoreConstAction):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE default=True,NEWLINE required=False,NEWLINE help=None):NEWLINE super(_StoreFalseAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE const=False,NEWLINE default=default,NEWLINE required=required,NEWLINE help=help)NEWLINENEWLINENEWLINEclass _AppendAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE nargs=None,NEWLINE const=None,NEWLINE default=None,NEWLINE type=None,NEWLINE choices=None,NEWLINE required=False,NEWLINE help=None,NEWLINE metavar=None):NEWLINE if nargs == 0:NEWLINE raise ValueError('nargs for append actions must be > 0; if arg 'NEWLINE 'strings are not supplying the value to append, 'NEWLINE 'the append const action may be more appropriate')NEWLINE if const is not None and nargs != OPTIONAL:NEWLINE raise ValueError('nargs must be %r to supply const' % OPTIONAL)NEWLINE super(_AppendAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=nargs,NEWLINE const=const,NEWLINE default=default,NEWLINE type=type,NEWLINE choices=choices,NEWLINE required=required,NEWLINE help=help,NEWLINE metavar=metavar)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE items = _copy.copy(_ensure_value(namespace, self.dest, []))NEWLINE items.append(values)NEWLINE setattr(namespace, self.dest, items)NEWLINENEWLINENEWLINEclass _AppendConstAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE const,NEWLINE default=None,NEWLINE required=False,NEWLINE help=None,NEWLINE metavar=None):NEWLINE super(_AppendConstAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=0,NEWLINE const=const,NEWLINE default=default,NEWLINE required=required,NEWLINE help=help,NEWLINE metavar=metavar)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE items = _copy.copy(_ensure_value(namespace, self.dest, []))NEWLINE items.append(self.const)NEWLINE setattr(namespace, self.dest, items)NEWLINENEWLINENEWLINEclass _CountAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest,NEWLINE default=None,NEWLINE required=False,NEWLINE help=None):NEWLINE super(_CountAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=0,NEWLINE default=default,NEWLINE required=required,NEWLINE help=help)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE new_count = _ensure_value(namespace, self.dest, 0) + 1NEWLINE setattr(namespace, self.dest, new_count)NEWLINENEWLINENEWLINEclass _HelpAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE dest=SUPPRESS,NEWLINE default=SUPPRESS,NEWLINE help=None):NEWLINE super(_HelpAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE default=default,NEWLINE nargs=0,NEWLINE help=help)NEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE parser.print_help()NEWLINE parser.exit()NEWLINENEWLINENEWLINEclass _VersionAction(Action):NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE version=None,NEWLINE dest=SUPPRESS,NEWLINE default=SUPPRESS,NEWLINE help="show program's version number and exit"):NEWLINE 
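# Usage sketch (illustrative, not from the original source; the parserNEWLINE # and option names are hypothetical). Registering this action:NEWLINE #   parser = ArgumentParser(prog='demo')NEWLINE #   parser.add_argument('--version', action='version', version='%(prog)s 1.0')NEWLINE 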
super(_VersionAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE default=default,NEWLINE nargs=0,NEWLINE help=help)NEWLINE self.version = versionNEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE version = self.versionNEWLINE if version is None:NEWLINE version = parser.versionNEWLINE formatter = parser._get_formatter()NEWLINE formatter.add_text(version)NEWLINE parser.exit(message=formatter.format_help())NEWLINENEWLINENEWLINEclass _SubParsersAction(Action):NEWLINENEWLINE class _ChoicesPseudoAction(Action):NEWLINENEWLINE def __init__(self, name, help):NEWLINE sup = super(_SubParsersAction._ChoicesPseudoAction, self)NEWLINE sup.__init__(option_strings=[], dest=name, help=help)NEWLINENEWLINE def __init__(self,NEWLINE option_strings,NEWLINE prog,NEWLINE parser_class,NEWLINE dest=SUPPRESS,NEWLINE help=None,NEWLINE metavar=None):NEWLINENEWLINE self._prog_prefix = progNEWLINE self._parser_class = parser_classNEWLINE self._name_parser_map = _collections.OrderedDict()NEWLINE self._choices_actions = []NEWLINENEWLINE super(_SubParsersAction, self).__init__(NEWLINE option_strings=option_strings,NEWLINE dest=dest,NEWLINE nargs=PARSER,NEWLINE choices=self._name_parser_map,NEWLINE help=help,NEWLINE metavar=metavar)NEWLINENEWLINE def add_parser(self, name, **kwargs):NEWLINE # set prog from the existing prefixNEWLINE if kwargs.get('prog') is None:NEWLINE kwargs['prog'] = '%s %s' % (self._prog_prefix, name)NEWLINENEWLINE # create a pseudo-action to hold the choice helpNEWLINE if 'help' in kwargs:NEWLINE help = kwargs.pop('help')NEWLINE choice_action = self._ChoicesPseudoAction(name, help)NEWLINE self._choices_actions.append(choice_action)NEWLINENEWLINE # create the parser and add it to the mapNEWLINE parser = self._parser_class(**kwargs)NEWLINE self._name_parser_map[name] = parserNEWLINE return parserNEWLINENEWLINE def _get_subactions(self):NEWLINE return self._choices_actionsNEWLINENEWLINE def __call__(self, parser, namespace, values, option_string=None):NEWLINE parser_name = values[0]NEWLINE arg_strings = values[1:]NEWLINENEWLINE # set the parser name if requestedNEWLINE if self.dest is not SUPPRESS:NEWLINE setattr(namespace, self.dest, parser_name)NEWLINENEWLINE # select the parserNEWLINE try:NEWLINE parser = self._name_parser_map[parser_name]NEWLINE except KeyError:NEWLINE tup = parser_name, ', '.join(self._name_parser_map)NEWLINE msg = _('unknown parser %r (choices: %s)') % tupNEWLINE raise ArgumentError(self, msg)NEWLINENEWLINE # parse all the remaining options into the namespaceNEWLINE # store any unrecognized options on the object, so that the topNEWLINE # level parser can decide what to do with themNEWLINENEWLINE # In case this subparser defines new defaults, we parse themNEWLINE # in a new namespace object and then update the originalNEWLINE # namespace for the relevant parts.NEWLINE subnamespace, arg_strings = parser.parse_known_args(arg_strings, None)NEWLINE for key, value in vars(subnamespace).items():NEWLINE setattr(namespace, key, value)NEWLINENEWLINE if arg_strings:NEWLINE vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])NEWLINE getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)NEWLINENEWLINENEWLINE# ==============NEWLINE# Type classesNEWLINE# ==============NEWLINENEWLINEclass FileType(object):NEWLINE """Factory for creating file object typesNEWLINENEWLINE Instances of FileType are typically passed as type= arguments to theNEWLINE ArgumentParser add_argument() method.NEWLINENEWLINE Keyword 
Arguments:NEWLINE - mode -- A string indicating how the file is to be opened. Accepts theNEWLINE same values as the builtin open() function.NEWLINE - bufsize -- The file's desired buffer size. Accepts the same values asNEWLINE the builtin open() function.NEWLINE """NEWLINENEWLINE def __init__(self, mode='r', bufsize=-1):NEWLINE self._mode = modeNEWLINE self._bufsize = bufsizeNEWLINENEWLINE def __call__(self, string):NEWLINE # the special argument "-" means sys.std{in,out}NEWLINE if string == '-':NEWLINE if 'r' in self._mode:NEWLINE return _sys.stdinNEWLINE elif 'w' in self._mode:NEWLINE return _sys.stdoutNEWLINE else:NEWLINE msg = _('argument "-" with mode %r') % self._modeNEWLINE raise ValueError(msg)NEWLINENEWLINE # all other arguments are used as file namesNEWLINE try:NEWLINE return open(string, self._mode, self._bufsize)NEWLINE except IOError as e:NEWLINE message = _("can't open '%s': %s")NEWLINE raise ArgumentTypeError(message % (string, e))NEWLINENEWLINE def __repr__(self):NEWLINE args = self._mode, self._bufsizeNEWLINE args_str = ', '.join(repr(arg) for arg in args if arg != -1)NEWLINE return '%s(%s)' % (type(self).__name__, args_str)NEWLINENEWLINE# ===========================NEWLINE# Optional and Positional ParsingNEWLINE# ===========================NEWLINENEWLINEclass Namespace(_AttributeHolder):NEWLINE """Simple object for storing attributes.NEWLINENEWLINE Implements equality by attribute names and values, and provides a simpleNEWLINE string representation.NEWLINE """NEWLINENEWLINE def __init__(self, **kwargs):NEWLINE for name in kwargs:NEWLINE setattr(self, name, kwargs[name])NEWLINENEWLINE __hash__ = NoneNEWLINENEWLINE def __eq__(self, other):NEWLINE if not isinstance(other, Namespace):NEWLINE return NotImplementedNEWLINE return vars(self) == vars(other)NEWLINENEWLINE def __ne__(self, other):NEWLINE if not isinstance(other, Namespace):NEWLINE return NotImplementedNEWLINE return not (self == other)NEWLINENEWLINE def __contains__(self, key):NEWLINE return key in self.__dict__NEWLINENEWLINENEWLINEclass _ActionsContainer(object):NEWLINENEWLINE def __init__(self,NEWLINE description,NEWLINE prefix_chars,NEWLINE argument_default,NEWLINE conflict_handler):NEWLINE super(_ActionsContainer, self).__init__()NEWLINENEWLINE self.description = descriptionNEWLINE self.argument_default = argument_defaultNEWLINE self.prefix_chars = prefix_charsNEWLINE self.conflict_handler = conflict_handlerNEWLINENEWLINE # set up registriesNEWLINE self._registries = {}NEWLINENEWLINE # register actionsNEWLINE self.register('action', None, _StoreAction)NEWLINE self.register('action', 'store', _StoreAction)NEWLINE self.register('action', 'store_const', _StoreConstAction)NEWLINE self.register('action', 'store_true', _StoreTrueAction)NEWLINE self.register('action', 'store_false', _StoreFalseAction)NEWLINE self.register('action', 'append', _AppendAction)NEWLINE self.register('action', 'append_const', _AppendConstAction)NEWLINE self.register('action', 'count', _CountAction)NEWLINE self.register('action', 'help', _HelpAction)NEWLINE self.register('action', 'version', _VersionAction)NEWLINE self.register('action', 'parsers', _SubParsersAction)NEWLINENEWLINE # raise an exception if the conflict handler is invalidNEWLINE self._get_handler()NEWLINENEWLINE # action storageNEWLINE self._actions = []NEWLINE self._option_string_actions = {}NEWLINENEWLINE # groupsNEWLINE self._action_groups = []NEWLINE self._mutually_exclusive_groups = []NEWLINENEWLINE # defaults storageNEWLINE self._defaults = {}NEWLINENEWLINE # determines 
whether an "option" looks like a negative numberNEWLINE self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')NEWLINENEWLINE # whether or not there are any optionals that look like negativeNEWLINE # numbers -- uses a list so it can be shared and editedNEWLINE self._has_negative_number_optionals = []NEWLINENEWLINE # ====================NEWLINE # Registration methodsNEWLINE # ====================NEWLINE def register(self, registry_name, value, object):NEWLINE registry = self._registries.setdefault(registry_name, {})NEWLINE registry[value] = objectNEWLINENEWLINE def _registry_get(self, registry_name, value, default=None):NEWLINE return self._registries[registry_name].get(value, default)NEWLINENEWLINE # ==================================NEWLINE # Namespace default accessor methodsNEWLINE # ==================================NEWLINE def set_defaults(self, **kwargs):NEWLINE self._defaults.update(kwargs)NEWLINENEWLINE # if these defaults match any existing arguments, replaceNEWLINE # the previous default on the object with the new oneNEWLINE for action in self._actions:NEWLINE if action.dest in kwargs:NEWLINE action.default = kwargs[action.dest]NEWLINENEWLINE def get_default(self, dest):NEWLINE for action in self._actions:NEWLINE if action.dest == dest and action.default is not None:NEWLINE return action.defaultNEWLINE return self._defaults.get(dest, None)NEWLINENEWLINENEWLINE # =======================NEWLINE # Adding argument actionsNEWLINE # =======================NEWLINE def add_argument(self, *args, **kwargs):NEWLINE """NEWLINE add_argument(dest, ..., name=value, ...)NEWLINE add_argument(option_string, option_string, ..., name=value, ...)NEWLINE """NEWLINENEWLINE # if no positional args are supplied or only one is supplied andNEWLINE # it doesn't look like an option string, parse a positionalNEWLINE # argumentNEWLINE chars = self.prefix_charsNEWLINE if not args or len(args) == 1 and args[0][0] not in chars:NEWLINE if args and 'dest' in kwargs:NEWLINE raise ValueError('dest supplied twice for positional argument')NEWLINE kwargs = self._get_positional_kwargs(*args, **kwargs)NEWLINENEWLINE # otherwise, we're adding an optional argumentNEWLINE else:NEWLINE kwargs = self._get_optional_kwargs(*args, **kwargs)NEWLINENEWLINE # if no default was supplied, use the parser-level defaultNEWLINE if 'default' not in kwargs:NEWLINE dest = kwargs['dest']NEWLINE if dest in self._defaults:NEWLINE kwargs['default'] = self._defaults[dest]NEWLINE elif self.argument_default is not None:NEWLINE kwargs['default'] = self.argument_defaultNEWLINENEWLINE # create the action object, and add it to the parserNEWLINE action_class = self._pop_action_class(kwargs)NEWLINE if not _callable(action_class):NEWLINE raise ValueError('unknown action "%s"' % (action_class,))NEWLINE action = action_class(**kwargs)NEWLINENEWLINE # raise an error if the action type is not callableNEWLINE type_func = self._registry_get('type', action.type, action.type)NEWLINE if not _callable(type_func):NEWLINE raise ValueError('%r is not callable' % (type_func,))NEWLINENEWLINE # raise an error if the metavar does not match the typeNEWLINE if hasattr(self, "_get_formatter"):NEWLINE try:NEWLINE self._get_formatter()._format_args(action, None)NEWLINE except TypeError:NEWLINE raise ValueError("length of metavar tuple does not match nargs")NEWLINENEWLINE return self._add_action(action)NEWLINENEWLINE def add_argument_group(self, *args, **kwargs):NEWLINE group = _ArgumentGroup(self, *args, **kwargs)NEWLINE self._action_groups.append(group)NEWLINE 
return groupNEWLINENEWLINE def add_mutually_exclusive_group(self, **kwargs):NEWLINE group = _MutuallyExclusiveGroup(self, **kwargs)NEWLINE self._mutually_exclusive_groups.append(group)NEWLINE return groupNEWLINENEWLINE def _add_action(self, action):NEWLINE # resolve any conflictsNEWLINE self._check_conflict(action)NEWLINENEWLINE # add to actions listNEWLINE self._actions.append(action)NEWLINE action.container = selfNEWLINENEWLINE # index the action by any option strings it hasNEWLINE for option_string in action.option_strings:NEWLINE self._option_string_actions[option_string] = actionNEWLINENEWLINE # set the flag if any option strings look like negative numbersNEWLINE for option_string in action.option_strings:NEWLINE if self._negative_number_matcher.match(option_string):NEWLINE if not self._has_negative_number_optionals:NEWLINE self._has_negative_number_optionals.append(True)NEWLINENEWLINE # return the created actionNEWLINE return actionNEWLINENEWLINE def _remove_action(self, action):NEWLINE self._actions.remove(action)NEWLINENEWLINE def _add_container_actions(self, container):NEWLINE # collect groups by titlesNEWLINE title_group_map = {}NEWLINE for group in self._action_groups:NEWLINE if group.title in title_group_map:NEWLINE msg = _('cannot merge actions - two groups are named %r')NEWLINE raise ValueError(msg % (group.title))NEWLINE title_group_map[group.title] = groupNEWLINENEWLINE # map each action to its groupNEWLINE group_map = {}NEWLINE for group in container._action_groups:NEWLINENEWLINE # if a group with the title exists, use that, otherwiseNEWLINE # create a new group matching the container's groupNEWLINE if group.title not in title_group_map:NEWLINE title_group_map[group.title] = self.add_argument_group(NEWLINE title=group.title,NEWLINE description=group.description,NEWLINE conflict_handler=group.conflict_handler)NEWLINENEWLINE # map the actions to their new groupNEWLINE for action in group._group_actions:NEWLINE group_map[action] = title_group_map[group.title]NEWLINENEWLINE # add container's mutually exclusive groupsNEWLINE # NOTE: if add_mutually_exclusive_group ever gains title= andNEWLINE # description= then this code will need to be expanded as aboveNEWLINE for group in container._mutually_exclusive_groups:NEWLINE mutex_group = self.add_mutually_exclusive_group(NEWLINE required=group.required)NEWLINENEWLINE # map the actions to their new mutex groupNEWLINE for action in group._group_actions:NEWLINE group_map[action] = mutex_groupNEWLINENEWLINE # add all actions to this container or their groupNEWLINE for action in container._actions:NEWLINE group_map.get(action, self)._add_action(action)NEWLINENEWLINE def _get_positional_kwargs(self, dest, **kwargs):NEWLINE # make sure required is not specifiedNEWLINE if 'required' in kwargs:NEWLINE msg = _("'required' is an invalid argument for positionals")NEWLINE raise TypeError(msg)NEWLINENEWLINE # mark positional arguments as required if at least one isNEWLINE # always requiredNEWLINE if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:NEWLINE kwargs['required'] = TrueNEWLINE if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:NEWLINE kwargs['required'] = TrueNEWLINENEWLINE # return the keyword arguments with no option stringsNEWLINE return dict(kwargs, dest=dest, option_strings=[])NEWLINENEWLINE def _get_optional_kwargs(self, *args, **kwargs):NEWLINE # determine short and long option stringsNEWLINE option_strings = []NEWLINE long_option_strings = []NEWLINE for option_string in args:NEWLINE # error on strings that 
don't start with an appropriate prefixNEWLINE if not option_string[0] in self.prefix_chars:NEWLINE msg = _('invalid option string %r: 'NEWLINE 'must start with a character %r')NEWLINE tup = option_string, self.prefix_charsNEWLINE raise ValueError(msg % tup)NEWLINENEWLINE # strings starting with two prefix characters are long optionsNEWLINE option_strings.append(option_string)NEWLINE if option_string[0] in self.prefix_chars:NEWLINE if len(option_string) > 1:NEWLINE if option_string[1] in self.prefix_chars:NEWLINE long_option_strings.append(option_string)NEWLINENEWLINE # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'NEWLINE dest = kwargs.pop('dest', None)NEWLINE if dest is None:NEWLINE if long_option_strings:NEWLINE dest_option_string = long_option_strings[0]NEWLINE else:NEWLINE dest_option_string = option_strings[0]NEWLINE dest = dest_option_string.lstrip(self.prefix_chars)NEWLINE if not dest:NEWLINE msg = _('dest= is required for options like %r')NEWLINE raise ValueError(msg % option_string)NEWLINE dest = dest.replace('-', '_')NEWLINENEWLINE # return the updated keyword argumentsNEWLINE return dict(kwargs, dest=dest, option_strings=option_strings)NEWLINENEWLINE def _pop_action_class(self, kwargs, default=None):NEWLINE action = kwargs.pop('action', default)NEWLINE return self._registry_get('action', action, action)NEWLINENEWLINE def _get_handler(self):NEWLINE # determine function from conflict handler stringNEWLINE handler_func_name = '_handle_conflict_%s' % self.conflict_handlerNEWLINE try:NEWLINE return getattr(self, handler_func_name)NEWLINE except AttributeError:NEWLINE msg = _('invalid conflict_resolution value: %r')NEWLINE raise ValueError(msg % self.conflict_handler)NEWLINENEWLINE def _check_conflict(self, action):NEWLINENEWLINE # find all options that conflict with this optionNEWLINE confl_optionals = []NEWLINE for option_string in action.option_strings:NEWLINE if option_string in self._option_string_actions:NEWLINE confl_optional = self._option_string_actions[option_string]NEWLINE confl_optionals.append((option_string, confl_optional))NEWLINENEWLINE # resolve any conflictsNEWLINE if confl_optionals:NEWLINE conflict_handler = self._get_handler()NEWLINE conflict_handler(action, confl_optionals)NEWLINENEWLINE def _handle_conflict_error(self, action, conflicting_actions):NEWLINE message = _('conflicting option string(s): %s')NEWLINE conflict_string = ', '.join([option_stringNEWLINE for option_string, actionNEWLINE in conflicting_actions])NEWLINE raise ArgumentError(action, message % conflict_string)NEWLINENEWLINE def _handle_conflict_resolve(self, action, conflicting_actions):NEWLINENEWLINE # remove all conflicting optionsNEWLINE for option_string, action in conflicting_actions:NEWLINENEWLINE # remove the conflicting optionNEWLINE action.option_strings.remove(option_string)NEWLINE self._option_string_actions.pop(option_string, None)NEWLINENEWLINE # if the option now has no option string, remove it from theNEWLINE # container holding itNEWLINE if not action.option_strings:NEWLINE action.container._remove_action(action)NEWLINENEWLINENEWLINEclass _ArgumentGroup(_ActionsContainer):NEWLINENEWLINE def __init__(self, container, title=None, description=None, **kwargs):NEWLINE # add any missing keyword arguments by checking the containerNEWLINE update = kwargs.setdefaultNEWLINE update('conflict_handler', container.conflict_handler)NEWLINE update('prefix_chars', container.prefix_chars)NEWLINE update('argument_default', container.argument_default)NEWLINE super_init = 
super(_ArgumentGroup, self).__init__NEWLINE super_init(description=description, **kwargs)NEWLINENEWLINE # group attributesNEWLINE self.title = titleNEWLINE self._group_actions = []NEWLINENEWLINE # share most attributes with the containerNEWLINE self._registries = container._registriesNEWLINE self._actions = container._actionsNEWLINE self._option_string_actions = container._option_string_actionsNEWLINE self._defaults = container._defaultsNEWLINE self._has_negative_number_optionals = \NEWLINE container._has_negative_number_optionalsNEWLINE self._mutually_exclusive_groups = container._mutually_exclusive_groupsNEWLINENEWLINE def _add_action(self, action):NEWLINE action = super(_ArgumentGroup, self)._add_action(action)NEWLINE self._group_actions.append(action)NEWLINE return actionNEWLINENEWLINE def _remove_action(self, action):NEWLINE super(_ArgumentGroup, self)._remove_action(action)NEWLINE self._group_actions.remove(action)NEWLINENEWLINENEWLINEclass _MutuallyExclusiveGroup(_ArgumentGroup):NEWLINENEWLINE def __init__(self, container, required=False):NEWLINE super(_MutuallyExclusiveGroup, self).__init__(container)NEWLINE self.required = requiredNEWLINE self._container = containerNEWLINENEWLINE def _add_action(self, action):NEWLINE if action.required:NEWLINE msg = _('mutually exclusive arguments must be optional')NEWLINE raise ValueError(msg)NEWLINE action = self._container._add_action(action)NEWLINE self._group_actions.append(action)NEWLINE return actionNEWLINENEWLINE def _remove_action(self, action):NEWLINE self._container._remove_action(action)NEWLINE self._group_actions.remove(action)NEWLINENEWLINENEWLINEclass ArgumentParser(_AttributeHolder, _ActionsContainer):NEWLINE """Object for parsing command line strings into Python objects.NEWLINENEWLINE Keyword Arguments:NEWLINE - prog -- The name of the program (default: sys.argv[0])NEWLINE - usage -- A usage message (default: auto-generated from arguments)NEWLINE - description -- A description of what the program doesNEWLINE - epilog -- Text following the argument descriptionsNEWLINE - parents -- Parsers whose arguments should be copied into this oneNEWLINE - formatter_class -- HelpFormatter class for printing help messagesNEWLINE - prefix_chars -- Characters that prefix optional argumentsNEWLINE - fromfile_prefix_chars -- Characters that prefix files containingNEWLINE additional argumentsNEWLINE - argument_default -- The default value for all argumentsNEWLINE - conflict_handler -- String indicating how to handle conflictsNEWLINE - add_help -- Add a -h/--help optionNEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE prog=None,NEWLINE usage=None,NEWLINE description=None,NEWLINE epilog=None,NEWLINE version=None,NEWLINE parents=[],NEWLINE formatter_class=HelpFormatter,NEWLINE prefix_chars='-',NEWLINE fromfile_prefix_chars=None,NEWLINE argument_default=None,NEWLINE conflict_handler='error',NEWLINE add_help=True):NEWLINENEWLINE if version is not None:NEWLINE import warningsNEWLINE warnings.warn(NEWLINE """The "version" argument to ArgumentParser is deprecated. 
"""NEWLINE """Please use """NEWLINE """"add_argument(..., action='version', version="N", ...)" """NEWLINE """instead""", DeprecationWarning)NEWLINENEWLINE superinit = super(ArgumentParser, self).__init__NEWLINE superinit(description=description,NEWLINE prefix_chars=prefix_chars,NEWLINE argument_default=argument_default,NEWLINE conflict_handler=conflict_handler)NEWLINENEWLINE # default setting for progNEWLINE if prog is None:NEWLINE prog = _os.path.basename(_sys.argv[0])NEWLINENEWLINE self.prog = progNEWLINE self.usage = usageNEWLINE self.epilog = epilogNEWLINE self.version = versionNEWLINE self.formatter_class = formatter_classNEWLINE self.fromfile_prefix_chars = fromfile_prefix_charsNEWLINE self.add_help = add_helpNEWLINENEWLINE add_group = self.add_argument_groupNEWLINE self._positionals = add_group(_('positional arguments'))NEWLINE self._optionals = add_group(_('optional arguments'))NEWLINE self._subparsers = NoneNEWLINENEWLINE # register typesNEWLINE def identity(string):NEWLINE return stringNEWLINE self.register('type', None, identity)NEWLINENEWLINE # add help and version arguments if necessaryNEWLINE # (using explicit default to override global argument_default)NEWLINE default_prefix = '-' if '-' in prefix_chars else prefix_chars[0]NEWLINE if self.add_help:NEWLINE self.add_argument(NEWLINE default_prefix+'h', default_prefix*2+'help',NEWLINE action='help', default=SUPPRESS,NEWLINE help=_('show this help message and exit'))NEWLINE if self.version:NEWLINE self.add_argument(NEWLINE default_prefix+'v', default_prefix*2+'version',NEWLINE action='version', default=SUPPRESS,NEWLINE version=self.version,NEWLINE help=_("show program's version number and exit"))NEWLINENEWLINE # add parent arguments and defaultsNEWLINE for parent in parents:NEWLINE self._add_container_actions(parent)NEWLINE try:NEWLINE defaults = parent._defaultsNEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE self._defaults.update(defaults)NEWLINENEWLINE # =======================NEWLINE # Pretty __repr__ methodsNEWLINE # =======================NEWLINE def _get_kwargs(self):NEWLINE names = [NEWLINE 'prog',NEWLINE 'usage',NEWLINE 'description',NEWLINE 'version',NEWLINE 'formatter_class',NEWLINE 'conflict_handler',NEWLINE 'add_help',NEWLINE ]NEWLINE return [(name, getattr(self, name)) for name in names]NEWLINENEWLINE # ==================================NEWLINE # Optional/Positional adding methodsNEWLINE # ==================================NEWLINE def add_subparsers(self, **kwargs):NEWLINE if self._subparsers is not None:NEWLINE self.error(_('cannot have multiple subparser arguments'))NEWLINENEWLINE # add the parser class to the arguments if it's not presentNEWLINE kwargs.setdefault('parser_class', type(self))NEWLINENEWLINE if 'title' in kwargs or 'description' in kwargs:NEWLINE title = _(kwargs.pop('title', 'subcommands'))NEWLINE description = _(kwargs.pop('description', None))NEWLINE self._subparsers = self.add_argument_group(title, description)NEWLINE else:NEWLINE self._subparsers = self._positionalsNEWLINENEWLINE # prog defaults to the usage message of this parser, skippingNEWLINE # optional arguments and with no "usage:" prefixNEWLINE if kwargs.get('prog') is None:NEWLINE formatter = self._get_formatter()NEWLINE positionals = self._get_positional_actions()NEWLINE groups = self._mutually_exclusive_groupsNEWLINE formatter.add_usage(self.usage, positionals, groups, '')NEWLINE kwargs['prog'] = formatter.format_help().strip()NEWLINENEWLINE # create the parsers action and add it to the positionals listNEWLINE 
parsers_class = self._pop_action_class(kwargs, 'parsers')NEWLINE action = parsers_class(option_strings=[], **kwargs)NEWLINE self._subparsers._add_action(action)NEWLINENEWLINE # return the created parsers actionNEWLINE return actionNEWLINENEWLINE def _add_action(self, action):NEWLINE if action.option_strings:NEWLINE self._optionals._add_action(action)NEWLINE else:NEWLINE self._positionals._add_action(action)NEWLINE return actionNEWLINENEWLINE def _get_optional_actions(self):NEWLINE return [actionNEWLINE for action in self._actionsNEWLINE if action.option_strings]NEWLINENEWLINE def _get_positional_actions(self):NEWLINE return [actionNEWLINE for action in self._actionsNEWLINE if not action.option_strings]NEWLINENEWLINE # =====================================NEWLINE # Command line argument parsing methodsNEWLINE # =====================================NEWLINE def parse_args(self, args=None, namespace=None):NEWLINE args, argv = self.parse_known_args(args, namespace)NEWLINE if argv:NEWLINE msg = _('unrecognized arguments: %s')NEWLINE self.error(msg % ' '.join(argv))NEWLINE return argsNEWLINENEWLINE def parse_known_args(self, args=None, namespace=None):NEWLINE if args is None:NEWLINE # args default to the system argsNEWLINE args = _sys.argv[1:]NEWLINE else:NEWLINE # make sure that args are mutableNEWLINE args = list(args)NEWLINENEWLINE # default Namespace built from parser defaultsNEWLINE if namespace is None:NEWLINE namespace = Namespace()NEWLINENEWLINE # add any action defaults that aren't presentNEWLINE for action in self._actions:NEWLINE if action.dest is not SUPPRESS:NEWLINE if not hasattr(namespace, action.dest):NEWLINE if action.default is not SUPPRESS:NEWLINE setattr(namespace, action.dest, action.default)NEWLINENEWLINE # add any parser defaults that aren't presentNEWLINE for dest in self._defaults:NEWLINE if not hasattr(namespace, dest):NEWLINE setattr(namespace, dest, self._defaults[dest])NEWLINENEWLINE # parse the arguments and exit if there are any errorsNEWLINE try:NEWLINE namespace, args = self._parse_known_args(args, namespace)NEWLINE if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):NEWLINE args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))NEWLINE delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)NEWLINE return namespace, argsNEWLINE except ArgumentError:NEWLINE err = _sys.exc_info()[1]NEWLINE self.error(str(err))NEWLINENEWLINE def _parse_known_args(self, arg_strings, namespace):NEWLINE # replace arg strings that are file referencesNEWLINE if self.fromfile_prefix_chars is not None:NEWLINE arg_strings = self._read_args_from_files(arg_strings)NEWLINENEWLINE # map all mutually exclusive arguments to the other argumentsNEWLINE # they can't occur withNEWLINE action_conflicts = {}NEWLINE for mutex_group in self._mutually_exclusive_groups:NEWLINE group_actions = mutex_group._group_actionsNEWLINE for i, mutex_action in enumerate(mutex_group._group_actions):NEWLINE conflicts = action_conflicts.setdefault(mutex_action, [])NEWLINE conflicts.extend(group_actions[:i])NEWLINE conflicts.extend(group_actions[i + 1:])NEWLINENEWLINE # find all option indices, and determine the arg_string_patternNEWLINE # which has an 'O' if there is an option at an index,NEWLINE # an 'A' if there is an argument, or a '-' if there is a '--'NEWLINE option_string_indices = {}NEWLINE arg_string_pattern_parts = []NEWLINE arg_strings_iter = iter(arg_strings)NEWLINE for i, arg_string in enumerate(arg_strings_iter):NEWLINENEWLINE # all args after -- are non-optionsNEWLINE if arg_string == '--':NEWLINE 
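# e.g. ['-x', '--', '-y'] yields the pattern 'O-A': everythingNEWLINE # after '--' is recorded as a positional ('A'), never an option.NEWLINE 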
arg_string_pattern_parts.append('-')NEWLINE for arg_string in arg_strings_iter:NEWLINE arg_string_pattern_parts.append('A')NEWLINENEWLINE # otherwise, add the arg to the arg stringsNEWLINE # and note the index if it was an optionNEWLINE else:NEWLINE option_tuple = self._parse_optional(arg_string)NEWLINE if option_tuple is None:NEWLINE pattern = 'A'NEWLINE else:NEWLINE option_string_indices[i] = option_tupleNEWLINE pattern = 'O'NEWLINE arg_string_pattern_parts.append(pattern)NEWLINENEWLINE # join the pieces together to form the patternNEWLINE arg_strings_pattern = ''.join(arg_string_pattern_parts)NEWLINENEWLINE # converts arg strings to the appropriate type and then takes the actionNEWLINE seen_actions = set()NEWLINE seen_non_default_actions = set()NEWLINENEWLINE def take_action(action, argument_strings, option_string=None):NEWLINE seen_actions.add(action)NEWLINE argument_values = self._get_values(action, argument_strings)NEWLINENEWLINE # error if this argument is not allowed with other previouslyNEWLINE # seen arguments, assuming that actions that use the defaultNEWLINE # value don't really count as "present"NEWLINE if argument_values is not action.default:NEWLINE seen_non_default_actions.add(action)NEWLINE for conflict_action in action_conflicts.get(action, []):NEWLINE if conflict_action in seen_non_default_actions:NEWLINE msg = _('not allowed with argument %s')NEWLINE action_name = _get_action_name(conflict_action)NEWLINE raise ArgumentError(action, msg % action_name)NEWLINENEWLINE # take the action if we didn't receive a SUPPRESS valueNEWLINE # (e.g. from a default)NEWLINE if argument_values is not SUPPRESS:NEWLINE action(self, namespace, argument_values, option_string)NEWLINENEWLINE # function to convert arg_strings into an optional actionNEWLINE def consume_optional(start_index):NEWLINENEWLINE # get the optional identified at this indexNEWLINE option_tuple = option_string_indices[start_index]NEWLINE action, option_string, explicit_arg = option_tupleNEWLINENEWLINE # identify additional optionals in the same arg stringNEWLINE # (e.g. 
-xyz is the same as -x -y -z if no args are required)NEWLINE match_argument = self._match_argumentNEWLINE action_tuples = []NEWLINE while True:NEWLINENEWLINE # if we found no optional action, skip itNEWLINE if action is None:NEWLINE extras.append(arg_strings[start_index])NEWLINE return start_index + 1NEWLINENEWLINE # if there is an explicit argument, try to match theNEWLINE # optional's string arguments to only thisNEWLINE if explicit_arg is not None:NEWLINE arg_count = match_argument(action, 'A')NEWLINENEWLINE # if the action is a single-dash option and takes noNEWLINE # arguments, try to parse more single-dash options outNEWLINE # of the tail of the option stringNEWLINE chars = self.prefix_charsNEWLINE if arg_count == 0 and option_string[1] not in chars:NEWLINE action_tuples.append((action, [], option_string))NEWLINE char = option_string[0]NEWLINE option_string = char + explicit_arg[0]NEWLINE new_explicit_arg = explicit_arg[1:] or NoneNEWLINE optionals_map = self._option_string_actionsNEWLINE if option_string in optionals_map:NEWLINE action = optionals_map[option_string]NEWLINE explicit_arg = new_explicit_argNEWLINE else:NEWLINE msg = _('ignored explicit argument %r')NEWLINE raise ArgumentError(action, msg % explicit_arg)NEWLINENEWLINE # if the action expects exactly one argument, we'veNEWLINE # successfully matched the option; exit the loopNEWLINE elif arg_count == 1:NEWLINE stop = start_index + 1NEWLINE args = [explicit_arg]NEWLINE action_tuples.append((action, args, option_string))NEWLINE breakNEWLINENEWLINE # error if a double-dash option did not use theNEWLINE # explicit argumentNEWLINE else:NEWLINE msg = _('ignored explicit argument %r')NEWLINE raise ArgumentError(action, msg % explicit_arg)NEWLINENEWLINE # if there is no explicit argument, try to match theNEWLINE # optional's string arguments with the following stringsNEWLINE # if successful, exit the loopNEWLINE else:NEWLINE start = start_index + 1NEWLINE selected_patterns = arg_strings_pattern[start:]NEWLINE arg_count = match_argument(action, selected_patterns)NEWLINE stop = start + arg_countNEWLINE args = arg_strings[start:stop]NEWLINE action_tuples.append((action, args, option_string))NEWLINE breakNEWLINENEWLINE # add the Optional to the list and return the index at whichNEWLINE # the Optional's string args stoppedNEWLINE assert action_tuplesNEWLINE for action, args, option_string in action_tuples:NEWLINE take_action(action, args, option_string)NEWLINE return stopNEWLINENEWLINE # the list of Positionals left to be parsed; this is modifiedNEWLINE # by consume_positionals()NEWLINE positionals = self._get_positional_actions()NEWLINENEWLINE # function to convert arg_strings into positional actionsNEWLINE def consume_positionals(start_index):NEWLINE # match as many Positionals as possibleNEWLINE match_partial = self._match_arguments_partialNEWLINE selected_pattern = arg_strings_pattern[start_index:]NEWLINE arg_counts = match_partial(positionals, selected_pattern)NEWLINENEWLINE # slice off the appropriate arg strings for each PositionalNEWLINE # and add the Positional and its args to the listNEWLINE for action, arg_count in zip(positionals, arg_counts):NEWLINE args = arg_strings[start_index: start_index + arg_count]NEWLINE start_index += arg_countNEWLINE take_action(action, args)NEWLINENEWLINE # slice off the Positionals that we just parsed and return theNEWLINE # index at which the Positionals' string args stoppedNEWLINE positionals[:] = positionals[len(arg_counts):]NEWLINE return start_indexNEWLINENEWLINE # consume Positionals and 
Optionals alternately, until we haveNEWLINE # passed the last option stringNEWLINE extras = []NEWLINE start_index = 0NEWLINE if option_string_indices:NEWLINE max_option_string_index = max(option_string_indices)NEWLINE else:NEWLINE max_option_string_index = -1NEWLINE while start_index <= max_option_string_index:NEWLINENEWLINE # consume any Positionals preceding the next optionNEWLINE next_option_string_index = min([NEWLINE indexNEWLINE for index in option_string_indicesNEWLINE if index >= start_index])NEWLINE if start_index != next_option_string_index:NEWLINE positionals_end_index = consume_positionals(start_index)NEWLINENEWLINE # only try to parse the next optional if we didn't consumeNEWLINE # the option string during the positionals parsingNEWLINE if positionals_end_index > start_index:NEWLINE start_index = positionals_end_indexNEWLINE continueNEWLINE else:NEWLINE start_index = positionals_end_indexNEWLINENEWLINE # if we consumed all the positionals we could and we're notNEWLINE # at the index of an option string, there were extra argumentsNEWLINE if start_index not in option_string_indices:NEWLINE strings = arg_strings[start_index:next_option_string_index]NEWLINE extras.extend(strings)NEWLINE start_index = next_option_string_indexNEWLINENEWLINE # consume the next optional and any arguments for itNEWLINE start_index = consume_optional(start_index)NEWLINENEWLINE # consume any positionals following the last OptionalNEWLINE stop_index = consume_positionals(start_index)NEWLINENEWLINE # if we didn't consume all the argument strings, there were extrasNEWLINE extras.extend(arg_strings[stop_index:])NEWLINENEWLINE # if we didn't use all the Positional objects, there were too fewNEWLINE # arg strings supplied.NEWLINE if positionals:NEWLINE self.error(_('too few arguments'))NEWLINENEWLINE # make sure all required actions were present, and convert defaults.NEWLINE for action in self._actions:NEWLINE if action not in seen_actions:NEWLINE if action.required:NEWLINE name = _get_action_name(action)NEWLINE self.error(_('argument %s is required') % name)NEWLINE else:NEWLINE # Convert action default now instead of doing it beforeNEWLINE # parsing arguments to avoid calling convert functionsNEWLINE # twice (which may fail) if the argument was given, butNEWLINE # only if it was defined already in the namespaceNEWLINE if (action.default is not None andNEWLINE isinstance(action.default, basestring) andNEWLINE hasattr(namespace, action.dest) andNEWLINE action.default is getattr(namespace, action.dest)):NEWLINE setattr(namespace, action.dest,NEWLINE self._get_value(action, action.default))NEWLINENEWLINE # make sure all required groups had one option presentNEWLINE for group in self._mutually_exclusive_groups:NEWLINE if group.required:NEWLINE for action in group._group_actions:NEWLINE if action in seen_non_default_actions:NEWLINE breakNEWLINENEWLINE # if no actions were used, report the errorNEWLINE else:NEWLINE names = [_get_action_name(action)NEWLINE for action in group._group_actionsNEWLINE if action.help is not SUPPRESS]NEWLINE msg = _('one of the arguments %s is required')NEWLINE self.error(msg % ' '.join(names))NEWLINENEWLINE # return the updated namespace and the extra argumentsNEWLINE return namespace, extrasNEWLINENEWLINE def _read_args_from_files(self, arg_strings):NEWLINE # expand arguments referencing filesNEWLINE new_arg_strings = []NEWLINE for arg_string in arg_strings:NEWLINENEWLINE # for regular arguments, just add them back into the listNEWLINE if not arg_string or arg_string[0] not in 
self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)

            # replace arguments referencing files with the file content
            else:
                try:
                    args_file = open(arg_string[1:])
                    try:
                        arg_strings = []
                        for arg_line in args_file.read().splitlines():
                            for arg in self.convert_arg_line_to_args(arg_line):
                                arg_strings.append(arg)
                        arg_strings = self._read_args_from_files(arg_strings)
                        new_arg_strings.extend(arg_strings)
                    finally:
                        args_file.close()
                except IOError:
                    err = _sys.exc_info()[1]
                    self.error(str(err))

        # return the modified argument list
        return new_arg_strings

    def convert_arg_line_to_args(self, arg_line):
        return [arg_line]

    def _match_argument(self, action, arg_strings_pattern):
        # match the pattern for this action to the arg strings
        nargs_pattern = self._get_nargs_pattern(action)
        match = _re.match(nargs_pattern, arg_strings_pattern)

        # raise an exception if we weren't able to find a match
        if match is None:
            nargs_errors = {
                None: _('expected one argument'),
                OPTIONAL: _('expected at most one argument'),
                ONE_OR_MORE: _('expected at least one argument'),
            }
            default = _('expected %s argument(s)') % action.nargs
            msg = nargs_errors.get(action.nargs, default)
            raise ArgumentError(action, msg)

        # return the number of arguments matched
        return len(match.group(1))

    def _match_arguments_partial(self, actions, arg_strings_pattern):
        # progressively shorten the actions list by slicing off the
        # final actions until we find a match
        result = []
        for i in range(len(actions), 0, -1):
            actions_slice = actions[:i]
            pattern = ''.join([self._get_nargs_pattern(action)
                               for action in actions_slice])
            match = _re.match(pattern, arg_strings_pattern)
            if match is not None:
                result.extend([len(string) for string in match.groups()])
                break

        # return the list of arg string counts
        return result

    def _parse_optional(self, arg_string):
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None

        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None

        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None

        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None

        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg

        # search through all possible prefixes of the option string
        # and all actions in the parser for possible interpretations
        option_tuples = self._get_option_tuples(arg_string)

        # if multiple actions match, the option string was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string
                                 for action, option_string, explicit_arg in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)

        # if exactly one action matched, this segmentation is good,
        # so return the parsed action
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple

        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None

        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None

        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None

    def _get_option_tuples(self, option_string):
        result = []

        # option strings starting with two prefix characters are only
        # split at the '='
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)

        # single character options can be concatenated with their arguments
        # but multiple character options always have to have their argument
        # separate
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]

            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)

        # shouldn't ever get here
        else:
            self.error(_('unexpected option string: %s') % option_string)

        # return the collected option tuples
        return result

    def _get_nargs_pattern(self, action):
        # in all examples below, we have to allow for '--' args
        # which are represented as '-' in the pattern
        nargs = action.nargs

        # the default (None) is assumed to be a single argument
        if nargs is None:
            nargs_pattern = '(-*A-*)'

        # allow zero or one arguments
        elif nargs == OPTIONAL:
            nargs_pattern = '(-*A?-*)'

        # allow zero or more arguments
        elif nargs == ZERO_OR_MORE:
            nargs_pattern = '(-*[A-]*)'

        # allow one or more arguments
        elif nargs == ONE_OR_MORE:
            nargs_pattern = '(-*A[A-]*)'

        # allow any number of options or arguments
        elif nargs == REMAINDER:
            nargs_pattern = '([-AO]*)'

        # allow one argument followed by any number of options or arguments
        elif nargs == PARSER:
            nargs_pattern = '(-*A[-AO]*)'

        # all others should be integers
        else:
            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)

        # if this is an optional action, -- is not allowed
        if action.option_strings:
            nargs_pattern = nargs_pattern.replace('-*', '')
            nargs_pattern = nargs_pattern.replace('-', '')

        # return the pattern
        return nargs_pattern

    # ========================
    # Value conversion methods
    # ========================
    def _get_values(self, action, arg_strings):
        # for everything but PARSER, REMAINDER args, strip out first '--'
        if action.nargs not in [PARSER, REMAINDER]:
            try:
                arg_strings.remove('--')
            except ValueError:
                pass

        # optional argument produces a default when not present
        if not arg_strings and action.nargs == OPTIONAL:
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            if isinstance(value, basestring):
                value = self._get_value(action, value)
                self._check_value(action, value)

        # when nargs='*' on a positional, if there were no command-line
        # args, use the default if it is anything other than None
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)

        # single argument or optional argument produces a single value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)

        # REMAINDER arguments convert all values, checking none
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]

        # PARSER arguments convert all values, but check only the first
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])

        # all other types of nargs produce a list
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)

        # return the converted value
        return value

    def _get_value(self, action, arg_string):
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            msg = _('%r is not callable')
            raise ArgumentError(action, msg % type_func)

        # convert the value to the appropriate type
        try:
            result = type_func(arg_string)

        # ArgumentTypeErrors indicate errors
        except ArgumentTypeError:
            name = getattr(action.type, '__name__', repr(action.type))
            msg = str(_sys.exc_info()[1])
            raise ArgumentError(action, msg)

        # TypeErrors or ValueErrors also indicate errors
        except (TypeError, ValueError):
            name = getattr(action.type, '__name__', repr(action.type))
            msg = _('invalid %s value: %r')
            raise ArgumentError(action, msg % (name, arg_string))

        # return the converted value
        return result

    def _check_value(self, action, value):
        # converted value must be one of the choices (if specified)
        if action.choices is not None and value not in action.choices:
            tup = value, ', '.join(map(repr, action.choices))
            msg = _('invalid choice: %r (choose from %s)') % tup
            raise ArgumentError(action, msg)

    # =======================
    # Help-formatting methods
    # =======================
    def format_usage(self):
        formatter = self._get_formatter()
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        return formatter.format_help()

    def format_help(self):
        formatter = self._get_formatter()

        # usage
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)

        # description
        formatter.add_text(self.description)

        # positionals, optionals and user-defined groups
        for action_group in self._action_groups:
            formatter.start_section(action_group.title)
            formatter.add_text(action_group.description)
            formatter.add_arguments(action_group._group_actions)
            formatter.end_section()

        # epilog
        formatter.add_text(self.epilog)

        # determine help from format above
        return formatter.format_help()

    def format_version(self):
        import warnings
        warnings.warn(
            'The format_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        formatter = self._get_formatter()
        formatter.add_text(self.version)
        return formatter.format_help()

    def _get_formatter(self):
        return self.formatter_class(prog=self.prog)

    # =====================
    # Help-printing methods
    # =====================
    def print_usage(self, file=None):
        if file is None:
            file = _sys.stdout
        self._print_message(self.format_usage(), file)

    def print_help(self, file=None):
        if file is None:
            file = _sys.stdout
        self._print_message(self.format_help(), file)

    def print_version(self, file=None):
        import warnings
        warnings.warn(
            'The print_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        self._print_message(self.format_version(), file)

    def _print_message(self, message, file=None):
        if message:
            if file is None:
                file = _sys.stderr
            file.write(message)

    # ===============
    # Exiting methods
    # ===============
    def exit(self, status=0, message=None):
        if message:
            self._print_message(message, _sys.stderr)
        _sys.exit(status)

    def error(self, message):
        """error(message: string)

        Prints a usage message incorporating the message to stderr and
        exits.

        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(_sys.stderr)
        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
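
# argparse only expands "@file" arguments when the parser is constructed with
# fromfile_prefix_chars, and convert_arg_line_to_args() above returns one
# argument per line by default. A minimal sketch of both hooks; "args.txt" is
# a hypothetical file containing the line "--name demo --retries 3":

import argparse

class WhitespaceFriendlyParser(argparse.ArgumentParser):
    def convert_arg_line_to_args(self, arg_line):
        # split each line of the @-file on whitespace instead of taking
        # the whole line as a single argument
        return arg_line.split()

parser = WhitespaceFriendlyParser(fromfile_prefix_chars='@')
parser.add_argument('--name')
parser.add_argument('--retries', type=int, choices=range(5))

# with the file present this would read: parser.parse_args(['@args.txt'])
args = parser.parse_args(['--name', 'demo', '--retries', '3'])
print(args.name, args.retries)  # demo 3
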
"""NEWLINE OpenVINO DL WorkbenchNEWLINE Dataset annotator moduleNEWLINENEWLINE Copyright (c) 2021 Intel CorporationNEWLINENEWLINE Licensed under the Apache License, Version 2.0 (the "License");NEWLINE you may not use this file except in compliance with the License.NEWLINE You may obtain a copy of the License atNEWLINE http://www.apache.org/licenses/LICENSE-2.0NEWLINE Unless required by applicable law or agreed to in writing, softwareNEWLINE distributed under the License is distributed on an "AS IS" BASIS,NEWLINE WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE See the License for the specific language governing permissions andNEWLINE limitations under the License.NEWLINE"""NEWLINEfrom wb.main.scripts.dataset_annotator.dataset_annotator import DatasetAnnotatorNEWLINEfrom wb.main.scripts.dataset_annotator.task_to_auto_annotated_dataset_type_mapper import \NEWLINE TaskToAutoAnnotatedDatasetTypeMapperNEWLINE |
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import numpy as np

from openvino.tools.mo.ops.splice import Splice
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.graph.graph import Graph, Node
from openvino.tools.mo.middle.replacement import MiddleReplacementPattern
from openvino.tools.mo.ops.assign import Assign
from openvino.tools.mo.ops.concat import Concat
from openvino.tools.mo.ops.const import Const
from openvino.tools.mo.ops.crop import Crop
from openvino.tools.mo.ops.read_value import ReadValue
from openvino.tools.mo.ops.result import Result
from openvino.tools.mo.utils.error import Error


class ReplaceMemoryOffsetNodePattern(MiddleReplacementPattern):
    """
    Replace MemoryOffset with Splice
    """
    enabled = True

    def run_before(self):
        from openvino.tools.mo.middle.RemoveDuplicationMemory import RemoveMemoryDuplicationPattern
        return [RemoveMemoryDuplicationPattern]

    def run_after(self):
        from openvino.tools.mo.middle.split_tdnn_memoryoffset import SplitTdnnMemoryOffset
        return [SplitTdnnMemoryOffset]

    @staticmethod
    def pattern():
        return dict(
            nodes=[('op', dict(op='MemoryOffset', has_default=False))],
            edges=[])

    @staticmethod
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        pair_node = Node(graph, node.pair_name)

        if pair_node.has_default:
            return

        if node.in_port(0).get_source() is not None:
            input_node_out_port = node.in_port(0).get_source()
            op_output_id = node.out_port(0).get_destination().node.id
            out_node_in_ports = pair_node.out_port(0).get_destinations()
        else:
            input_node_out_port = pair_node.in_port(0).get_source()
            op_output_id = pair_node.out_port(0).get_destination().node.id
            out_node_in_ports = node.out_port(0).get_destinations()

        in_shape = input_node_out_port.data.get_shape().copy()

        node_id = node.id
        node_name = node.name
        node_t = node.t

        splice = Splice(graph, {'name': node_name,
                                'id': node_id,
                                'context': int64_array(range(node_t, 1))
                                if node_t < 0 else int64_array(range(0, node_t + 1))}).create_node()
        splice.in_port(0).connect(input_node_out_port)

        # offset of Crop will be 0 (first element) if node_t < 0 and in_shape[1]*node_t (last element) if node_t > 0
        crop = Crop(graph, {'name': 'Splice_Crop',
                            'axis': int64_array([1]),
                            'offset': int64_array([max(0, in_shape[1] * node_t)]),
                            'dim': int64_array([in_shape[1]])}).create_node()

        splice.out_port(0).connect(crop.in_port(0))
        splice.out_port(0).data.set_shape(int64_array([in_shape[0], (abs(node_t) + 1) * in_shape[1]]))

        outs = input_node_out_port.get_destinations()
        for in_port in outs:
            out_ = in_port.node
            if out_.op == 'Concat' and out_ == out_node_in_ports[0].node:
                crop_input = Crop(graph, {'name': 'Splice_Crop',
                                          'axis': int64_array([1]),
                                          'offset': int64_array([-min(0, in_shape[1] * node_t)]),
                                          'dim': int64_array([in_shape[1]])}).create_node()
                splice.out_port(0).connect(crop_input.in_port(0))

                in_port.disconnect()
                crop_input.out_port(0).connect(in_port)
                crop_input.out_port(0).data.set_shape(in_shape)

        for dest_port in out_node_in_ports:
            dest_port.connect(crop.out_port(0))

        graph.remove_node(op_output_id)
        graph.remove_node(node.id)
        graph.remove_node(pair_node.id)


class ReplaceMemoryOffsetWithMemoryNodePattern(MiddleReplacementPattern):
    """
    Replace MemoryOffset with Memory if IfDefined used with it to avoid cycles
    """
    enabled = True
    force_shape_inference = True

    def run_before(self):
        from openvino.tools.mo.middle.RemoveDuplicationMemory import RemoveMemoryDuplicationPattern
        return [RemoveMemoryDuplicationPattern]

    @staticmethod
    def pattern():
        return dict(
            nodes=[('op', dict(op='MemoryOffset', has_default=True))],
            edges=[])

    @staticmethod
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        pair_node = Node(graph, node.pair_name)

        if node.t >= 0:
            raise Error('Does not support IfDefined with t > 0')

        if node.in_port(0).get_source() is not None:
            input_port = node.in_port(0).get_source()
            op_output_id = node.out_port(0).get_destination().node.id
            out_port = pair_node.out_port(0)
            node_name = node.name
            pair_name = pair_node.name
        else:
            input_port = pair_node.in_port(0).get_source()
            op_output_id = pair_node.out_port(0).get_destination().node.id
            out_port = node.out_port(0)
            node_name = pair_node.name
            pair_name = node.name

        in_shape = input_port.data.get_shape()
        node_t = abs(node.t)

        init_value_memory_out = Const(graph, {'name': 'init_value_' + pair_name,
                                              'value': np.zeros(int64_array([in_shape[0], in_shape[1] * node_t])),
                                              'shape': int64_array([in_shape[0], in_shape[1] * node_t])}).create_node()
        memory_out = ReadValue(graph, {'name': pair_name, 'variable_id': node_name + pair_name}).create_node()
        init_value_memory_out.out_port(0).connect(memory_out.in_port(0))

        if node_t > 1:
            crop_concat = Crop(graph, {'name': 'Memory_crop', 'dim': np.array([in_shape[1] * (node_t - 1)]),
                                       'offset': np.array([in_shape[1]]), 'axis': np.array([1])}).create_node()
            memory_out.out_port(0).connect(crop_concat.in_port(0))
            concat = Concat(graph, {'name': 'Memory_concat'}).create_node()
            concat.add_sequence_of_ports('in', range(2))
            crop_concat.out_port(0).connect(concat.in_port(0))
            concat.in_port(1).connect(input_port)

            memory_in = Assign(graph, {'name': node_name, 'variable_id': node_name + pair_name}).create_node()
            concat.out_port(0).connect(memory_in.in_port(0))
            out = Result(graph, {'name': 'Memory_output'}).create_node()
            memory_in.out_port(0).connect(out.in_port(0))

            crop_out = Crop(graph, {'name': 'Memory_crop_out', 'dim': np.array([in_shape[1]]),
                                    'offset': np.array([0]), 'axis': np.array([1])}).create_node()
            memory_out.out_port(0).connect(crop_out.in_port(0))
            out_port.get_connection().set_source(crop_out.out_port(0))
        else:
            memory_in = Assign(graph, {'name': node_name, 'variable_id': node_name + pair_name}).create_node()
            memory_in.in_port(0).connect(input_port)
            out = Result(graph, {'name': 'Memory_output'}).create_node()
            memory_in.out_port(0).connect(out.in_port(0))
            out_port.get_connection().set_source(memory_out.out_port(0))

        graph.remove_node(op_output_id)
        graph.remove_node(node.id)
        graph.remove_node(pair_node.id)
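
# A small, self-contained sketch of the Crop arithmetic used above: the Splice
# output stacks abs(node_t) + 1 copies of the feature dimension side by side,
# and the Crop keeps a single copy of width in_shape[1], starting at element 0
# when node_t < 0 and at in_shape[1] * node_t when node_t > 0. Plain numpy,
# no Model Optimizer graph required:

import numpy as np

def splice_crop_params(in_shape, node_t):
    context = np.arange(node_t, 1) if node_t < 0 else np.arange(0, node_t + 1)
    offset = max(0, in_shape[1] * node_t)
    return context, offset, in_shape[1]

print(splice_crop_params([1, 40], -3))  # (array([-3, -2, -1,  0]), 0, 40)
print(splice_crop_params([1, 40], 2))   # (array([0, 1, 2]), 80, 40)
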
import os
import shutil

import math
import sys

ANSI_ESCAPE_SEQUENCE_START = '\x1b'
ANSI_ESCAPE_SEQUENCE_END = 'm'


def get_terminal_width():
    # When stdout is piped, Linux runs the program in a separate (terminal-less) process, so shutil won't work;
    # instead of "echo x | program.py | cat" use "echo x | (export COLUMNS; program.py) | cat".

    # Because PyCharm runs programs in a separate process, shutil.get_terminal_size() reports 80x24 there.
    if "PYCHARM_HOSTED" in os.environ:
        return 210

    return shutil.get_terminal_size().columns


def get_terminal_height():
    # Same caveats as get_terminal_width(): piped stdout and PyCharm both hide the real terminal size.
    if "PYCHARM_HOSTED" in os.environ:
        return 40

    return shutil.get_terminal_size().lines


def fit_text(text, width=None, already_used_characters=0, postfix='...'):
    width = width or get_terminal_width()
    if already_used_characters + len(text) > width - len(postfix):
        return text[:width - already_used_characters - len(postfix)] + postfix
    else:
        return text


# TODO: instead of the three-letter "..." use the one-character ellipsis "…" (a few places here and maybe elsewhere?)
def fit_text_printable_part_only(text, width=None, already_used_characters=0, postfix_if_cant_fit='...'):
    width = width or get_terminal_width()
    return get_printable_text_substring(text, 0, width - already_used_characters,
                                        postfix_if_cant_fit=postfix_if_cant_fit)


def get_printable_text_substring(text, _from, _len, postfix_if_cant_fit='...'):
    # print(f'get_printable_text_substring({repr(text)}, {_from}, {_len})')
    # TODO: https://unix.stackexchange.com/questions/111899/how-to-strip-color-codes-out-of-stdout-and-pipe-to-file-and-stdout
    escape_sequence_in_progress = False
    printable_characters = 0
    output = []
    flags = []
    characters_to_skip = _from
    characters_skipped = 0
    for character in text:
        if character == ANSI_ESCAPE_SEQUENCE_START:
            escape_sequence_in_progress = True

        if printable_characters >= _len and not escape_sequence_in_progress:  # text is longer than we can fit
            if len(postfix_if_cant_fit) > 0:
                removed_so_far = 0
                for i in range(len(output) - 1, 0, -1):
                    if not flags[i]:  # not non-printable = printable
                        removed_so_far += 1
                        del output[i]
                        if removed_so_far == len(postfix_if_cant_fit):
                            break

                output.extend(list(postfix_if_cant_fit))
            break

        if characters_skipped < characters_to_skip:  # still skipping the first `_from` printable characters
            if not escape_sequence_in_progress:
                characters_skipped += 1
        else:  # normal mode (after skipping)
            output.append(character)
            flags.append(escape_sequence_in_progress)

            if not escape_sequence_in_progress:
                printable_characters += 1

        if escape_sequence_in_progress and character == ANSI_ESCAPE_SEQUENCE_END:
            escape_sequence_in_progress = False

    return ''.join(output)


def get_printable_text_length(text):
    escape_sequence_in_progress = False
    printable_characters = 0
    current_sequence_length = 0
    for character in text.rstrip():
        if character == ANSI_ESCAPE_SEQUENCE_START:
            escape_sequence_in_progress = True
            current_sequence_length = 0

        if not escape_sequence_in_progress:
            printable_characters += 1
        else:
            current_sequence_length += 1

        if escape_sequence_in_progress and character == ANSI_ESCAPE_SEQUENCE_END:
            escape_sequence_in_progress = False
            current_sequence_length = 0

    printable_characters += current_sequence_length

    return printable_characters


def get_last_ansi_sequence(text):
    starting_pos = text.rfind(ANSI_ESCAPE_SEQUENCE_START)
    if starting_pos == -1:
        return ''
    ending_pos = text.find(ANSI_ESCAPE_SEQUENCE_END, starting_pos)
    if ending_pos == -1:
        return ''

    return text[starting_pos:ending_pos + 1]


def colorized_center(text, width, fill_char, left_color, middle_color, right_color, rainbow=False):
    output = ''
    text_len = len(str(text))
    remaining_len = width - text_len
    for i in range(int(math.floor(remaining_len / 2))):
        cur_color_index = left_color if not rainbow else i % 16
        output += colorize_text(fill_char, cur_color_index)
    output += colorize_text(text, middle_color)
    for i in range(int(math.ceil(remaining_len / 2))):
        cur_color_index = right_color if not rainbow else i % 16
        output += colorize_text(fill_char, cur_color_index)
    output += colorize_text('', 255)
    return output


# TODO: split into color_start, color_end then implement:
# def colorize_text(text, color, normal_color):
#     return color_start(color) + text + color_end(normal_color)


def colorize_text(text, color):
    return f'\x1b[38;5;{color}m{text}'


def reset_color():
    return '\x1b[39m'


def clear_to_end_of_line():
    return '\x1b[K'


def clear_to_end_of_screen():
    return '\x1b[J'


def get_underscore_start():
    return '\x1b[4m'


def get_underscore_end():
    return '\x1b[24m'


def get_move_left(character_count):
    if character_count > 0:
        return f'\x1b[{character_count}D'
    else:
        return ''


def get_move_up(lines):
    return f'\x1b[{lines}A'


def replace_whitespace_characters_by_their_representations(original_lines):
    # TODO: use str.translate
    replace_pairs = [['\n', '\\n'], ['\t', '\\t'], ['\r', '\\r'], ['\f', '\\f'], ['\b', '\\b'], ['\x0b', '\\x0b']]
    text = original_lines
    for replace_from, replace_to in replace_pairs:
        text = text.replace(replace_from, replace_to)
    return text


def is_piping_text():
    return not os.isatty(sys.stdin.fileno())


def read_text_from_pipe(encoding='utf8', errors='replace'):
    return sys.stdin.buffer.read().decode(encoding, errors)


if __name__ == '__main__':
    orig = '12345\x1b[38;5;m1'
    for i in range(4, 6 + 1):
        out_dots = get_printable_text_substring(orig, 0, i)
        out_empty = get_printable_text_substring(orig, 0, i, postfix_if_cant_fit="")
        print(f'{i}# {orig} + "..." -> {out_dots} ({len(out_dots)}:{get_printable_text_length(out_dots)})')
        print(f'{i}# {orig} + "" -> {out_empty} ({len(out_empty)}:{get_printable_text_length(out_empty)})')
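
# End-to-end sketch of the helpers above (assuming this file is importable as
# terminal_utils, a hypothetical module name): len() counts the ANSI escape
# sequences, get_printable_text_length() does not, and fit_text() trims to a
# given width.

from terminal_utils import colorize_text, reset_color, fit_text, get_printable_text_length

colored = colorize_text('status: OK', 46) + reset_color()
print(len(colored), get_printable_text_length(colored))  # 25 vs. 10 printable characters
print(fit_text('a very long status line that will not fit', width=20))  # 17 chars + '...'
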
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling

from .. import models as _models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

class StorageAccountsOperations(object):
    """StorageAccountsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.storage.v2018_07_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def check_name_availability(
        self,
        account_name,  # type: "_models.StorageAccountCheckNameAvailabilityParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.CheckNameAvailabilityResult"
        """Checks that the storage account name is valid and is not already in use.

        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountCheckNameAvailabilityParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CheckNameAvailabilityResult, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2018_07_01.models.CheckNameAvailabilityResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CheckNameAvailabilityResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.check_name_availability.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(account_name, 'StorageAccountCheckNameAvailabilityParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability'}  # type: ignore
-> Optional["_models.StorageAccount"]NEWLINE cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE content_type = kwargs.pop("content_type", "application/json")NEWLINE accept = "application/json"NEWLINENEWLINE # Construct URLNEWLINE url = self._create_initial.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE body_content_kwargs = {} # type: Dict[str, Any]NEWLINE body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters')NEWLINE body_content_kwargs['content'] = body_contentNEWLINE request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200, 202]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE deserialized = NoneNEWLINE if response.status_code == 200:NEWLINE deserialized = self._deserialize('StorageAccount', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINE _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignoreNEWLINENEWLINE def begin_create(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE parameters, # type: "_models.StorageAccountCreateParameters"NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> LROPoller["_models.StorageAccount"]NEWLINE """Asynchronously creates a new storage account with the specified parameters. If an account isNEWLINE already created and a subsequent create request is issued with different properties, theNEWLINE account properties will be updated. If an account is already created and a subsequent create orNEWLINE update request is issued with the exact same set of properties, the request will succeed.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. 
TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :param parameters: The parameters to provide for the created account.NEWLINE :type parameters: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountCreateParametersNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :keyword str continuation_token: A continuation token to restart a poller from a saved state.NEWLINE :keyword polling: By default, your polling method will be ARMPolling.NEWLINE Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.NEWLINE :paramtype polling: bool or ~azure.core.polling.PollingMethodNEWLINE :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.NEWLINE :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response)NEWLINE :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2018_07_01.models.StorageAccount]NEWLINE :raises ~azure.core.exceptions.HttpResponseError:NEWLINE """NEWLINE polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]NEWLINE lro_delay = kwargs.pop(NEWLINE 'polling_interval',NEWLINE self._config.polling_intervalNEWLINE )NEWLINE cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]NEWLINE if cont_token is None:NEWLINE raw_result = self._create_initial(NEWLINE resource_group_name=resource_group_name,NEWLINE account_name=account_name,NEWLINE parameters=parameters,NEWLINE cls=lambda x,y,z: x,NEWLINE **kwargsNEWLINE )NEWLINENEWLINE kwargs.pop('error_map', None)NEWLINE kwargs.pop('content_type', None)NEWLINENEWLINE def get_long_running_output(pipeline_response):NEWLINE deserialized = self._deserialize('StorageAccount', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINE return deserializedNEWLINENEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINENEWLINE if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)NEWLINE elif polling is False: polling_method = NoPolling()NEWLINE else: polling_method = pollingNEWLINE if cont_token:NEWLINE return LROPoller.from_continuation_token(NEWLINE polling_method=polling_method,NEWLINE continuation_token=cont_token,NEWLINE client=self._client,NEWLINE deserialization_callback=get_long_running_outputNEWLINE )NEWLINE else:NEWLINE return LROPoller(self._client, raw_result, get_long_running_output, polling_method)NEWLINE begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignoreNEWLINENEWLINE def delete(NEWLINE self,NEWLINE 
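
# begin_create() returns an LROPoller; calling .result() drives ARMPolling
# until the account reaches a terminal state. A hedged sketch (placeholders as
# above; the generated models accept plain strings where enum values are
# expected):

from azure.identity import DefaultAzureCredential
from azure.mgmt.storage.v2018_07_01 import StorageManagementClient
from azure.mgmt.storage.v2018_07_01.models import Sku, StorageAccountCreateParameters

client = StorageManagementClient(DefaultAzureCredential(), '<subscription-id>')
poller = client.storage_accounts.begin_create(
    '<resource-group>', 'mystorageacct123',
    StorageAccountCreateParameters(sku=Sku(name='Standard_LRS'),
                                   kind='StorageV2', location='westus'))
account = poller.result()  # blocks until the LRO completes
print(account.provisioning_state)
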
    def delete(
        self,
        resource_group_name,  # type: str
        account_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Deletes a storage account in Microsoft Azure.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'}  # type: ignore
    def get_properties(
        self,
        resource_group_name,  # type: str
        account_name,  # type: str
        expand="geoReplicationStats",  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.StorageAccount"
        """Returns the properties for the specified storage account including but not limited to name, SKU
        name, location, and account status. The ListKeys operation should be used to retrieve storage
        keys.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param expand: May be used to expand the properties within account's properties. By default,
         data is not included when fetching properties. Currently we only support geoReplicationStats.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccount, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2018_07_01.models.StorageAccount
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.StorageAccount"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        accept = "application/json"

        # Construct URL
        url = self.get_properties.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('StorageAccount', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'}  # type: ignore
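
# get_properties() only sends the $expand query parameter when expand is not
# None; a hedged sketch (placeholders as above, and geo_replication_stats is
# assumed to be the deserialized attribute for geoReplicationStats):

from azure.identity import DefaultAzureCredential
from azure.mgmt.storage.v2018_07_01 import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), '<subscription-id>')
props = client.storage_accounts.get_properties(
    '<resource-group>', 'mystorageacct123', expand='geoReplicationStats')
print(props.sku.name, props.primary_location, props.geo_replication_stats)
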
    def update(
        self,
        resource_group_name,  # type: str
        account_name,  # type: str
        parameters,  # type: "_models.StorageAccountUpdateParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.StorageAccount"
        """The update operation can be used to update the SKU, encryption, access tier, or tags for a
        storage account. It can also be used to map the account to a custom domain. Only one custom
        domain is supported per storage account; the replacement/change of custom domain is not
        supported. In order to replace an old custom domain, the old value must be cleared/unregistered
        before a new value can be set. The update of multiple properties is supported. This call does
        not change the storage keys for the account. If you want to change the storage account keys,
        use the regenerate keys operation. The location and name of the storage account cannot be
        changed after creation.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param parameters: The parameters to provide for the updated account.
        :type parameters: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: StorageAccount, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2018_07_01.models.StorageAccount
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.StorageAccount"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('StorageAccount', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'}  # type: ignore

    def list(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.StorageAccountListResult"]
        """Lists all the storage accounts available under the subscription. Note that storage keys are not
        returned; use the ListKeys operation for this.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either StorageAccountListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2018_07_01.models.StorageAccountListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.StorageAccountListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-07-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('StorageAccountListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts'}  # type: ignore
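
# list() returns an ItemPaged that lazily follows next_link via get_next()
# above, so a plain for-loop walks every page of accounts in the subscription;
# a hedged sketch (placeholders as above):

from azure.identity import DefaultAzureCredential
from azure.mgmt.storage.v2018_07_01 import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), '<subscription-id>')
for account in client.storage_accounts.list():
    print(account.name, account.location)
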
-> Iterable["_models.StorageAccountListResult"]NEWLINE """Lists all the storage accounts available under the given resource group. Note that storage keysNEWLINE are not returned; use the ListKeys operation for this.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: An iterator like instance of either StorageAccountListResult or the result of cls(response)NEWLINE :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2018_07_01.models.StorageAccountListResult]NEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE accept = "application/json"NEWLINENEWLINE def prepare_request(next_link=None):NEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE if not next_link:NEWLINE # Construct URLNEWLINE url = self.list_by_resource_group.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE request = self._client.get(url, query_parameters, header_parameters)NEWLINE else:NEWLINE url = next_linkNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE request = self._client.get(url, query_parameters, header_parameters)NEWLINE return requestNEWLINENEWLINE def extract_data(pipeline_response):NEWLINE deserialized = self._deserialize('StorageAccountListResult', pipeline_response)NEWLINE list_of_elem = deserialized.valueNEWLINE if cls:NEWLINE list_of_elem = cls(list_of_elem)NEWLINE return None, iter(list_of_elem)NEWLINENEWLINE def get_next(next_link=None):NEWLINE request = prepare_request(next_link)NEWLINENEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE return pipeline_responseNEWLINENEWLINE return ItemPaged(NEWLINE get_next, extract_dataNEWLINE )NEWLINE list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts'} # type: ignoreNEWLINENEWLINE def list_keys(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) 
-> "_models.StorageAccountListKeysResult"NEWLINE """Lists the access keys for the specified storage account.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: StorageAccountListKeysResult, or the result of cls(response)NEWLINE :rtype: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountListKeysResultNEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE accept = "application/json"NEWLINENEWLINE # Construct URLNEWLINE url = self.list_keys.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE request = self._client.post(url, query_parameters, header_parameters)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINE list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys'} # type: ignoreNEWLINENEWLINE def regenerate_key(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE regenerate_key, # type: "_models.StorageAccountRegenerateKeyParameters"NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> "_models.StorageAccountListKeysResult"NEWLINE """Regenerates one of the access keys for the specified storage account.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. 
TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :param regenerate_key: Specifies name of the key which should be regenerated -- key1 or key2.NEWLINE :type regenerate_key: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountRegenerateKeyParametersNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: StorageAccountListKeysResult, or the result of cls(response)NEWLINE :rtype: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountListKeysResultNEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE content_type = kwargs.pop("content_type", "application/json")NEWLINE accept = "application/json"NEWLINENEWLINE # Construct URLNEWLINE url = self.regenerate_key.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE body_content_kwargs = {} # type: Dict[str, Any]NEWLINE body_content = self._serialize.body(regenerate_key, 'StorageAccountRegenerateKeyParameters')NEWLINE body_content_kwargs['content'] = body_contentNEWLINE request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINE regenerate_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey'} # type: ignoreNEWLINENEWLINE def list_account_sas(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE parameters, # 
type: "_models.AccountSasParameters"NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> "_models.ListAccountSasResponse"NEWLINE """List SAS credentials of a storage account.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :param parameters: The parameters to provide to list SAS credentials for the storage account.NEWLINE :type parameters: ~azure.mgmt.storage.v2018_07_01.models.AccountSasParametersNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: ListAccountSasResponse, or the result of cls(response)NEWLINE :rtype: ~azure.mgmt.storage.v2018_07_01.models.ListAccountSasResponseNEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAccountSasResponse"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE content_type = kwargs.pop("content_type", "application/json")NEWLINE accept = "application/json"NEWLINENEWLINE # Construct URLNEWLINE url = self.list_account_sas.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE body_content_kwargs = {} # type: Dict[str, Any]NEWLINE body_content = self._serialize.body(parameters, 'AccountSasParameters')NEWLINE body_content_kwargs['content'] = body_contentNEWLINE request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE deserialized = self._deserialize('ListAccountSasResponse', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINE list_account_sas.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas'} # type: ignoreNEWLINENEWLINE def list_service_sas(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE parameters, # type: "_models.ServiceSasParameters"NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> "_models.ListServiceSasResponse"NEWLINE """List service SAS credentials of a specific resource.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :param parameters: The parameters to provide to list service SAS credentials.NEWLINE :type parameters: ~azure.mgmt.storage.v2018_07_01.models.ServiceSasParametersNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: ListServiceSasResponse, or the result of cls(response)NEWLINE :rtype: ~azure.mgmt.storage.v2018_07_01.models.ListServiceSasResponseNEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ListServiceSasResponse"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINE content_type = kwargs.pop("content_type", "application/json")NEWLINE accept = "application/json"NEWLINENEWLINE # Construct URLNEWLINE url = self.list_service_sas.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINE header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')NEWLINE header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')NEWLINENEWLINE body_content_kwargs = {} # type: Dict[str, Any]NEWLINE body_content = self._serialize.body(parameters, 'ServiceSasParameters')NEWLINE body_content_kwargs['content'] = body_contentNEWLINE request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE deserialized 
= self._deserialize('ListServiceSasResponse', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINE list_service_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas'} # type: ignoreNEWLINENEWLINE def _failover_initial(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> NoneNEWLINE cls = kwargs.pop('cls', None) # type: ClsType[None]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE api_version = "2018-07-01"NEWLINENEWLINE # Construct URLNEWLINE url = self._failover_initial.metadata['url'] # type: ignoreNEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINE url = self._client.format_url(url, **path_format_arguments)NEWLINENEWLINE # Construct parametersNEWLINE query_parameters = {} # type: Dict[str, Any]NEWLINE query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')NEWLINENEWLINE # Construct headersNEWLINE header_parameters = {} # type: Dict[str, Any]NEWLINENEWLINE request = self._client.post(url, query_parameters, header_parameters)NEWLINE pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200, 202]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, None, {})NEWLINENEWLINE _failover_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignoreNEWLINENEWLINE def begin_failover(NEWLINE self,NEWLINE resource_group_name, # type: strNEWLINE account_name, # type: strNEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> LROPoller[None]NEWLINE """Failover request can be triggered for a storage account in case of availability issues. TheNEWLINE failover occurs from the storage account's primary cluster to secondary cluster for RA-GRSNEWLINE accounts. The secondary cluster will become primary after failover.NEWLINENEWLINE :param resource_group_name: The name of the resource group within the user's subscription. 
TheNEWLINE name is case insensitive.NEWLINE :type resource_group_name: strNEWLINE :param account_name: The name of the storage account within the specified resource group.NEWLINE Storage account names must be between 3 and 24 characters in length and use numbers andNEWLINE lower-case letters only.NEWLINE :type account_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :keyword str continuation_token: A continuation token to restart a poller from a saved state.NEWLINE :keyword polling: By default, your polling method will be ARMPolling.NEWLINE Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.NEWLINE :paramtype polling: bool or ~azure.core.polling.PollingMethodNEWLINE :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.NEWLINE :return: An instance of LROPoller that returns either None or the result of cls(response)NEWLINE :rtype: ~azure.core.polling.LROPoller[None]NEWLINE :raises ~azure.core.exceptions.HttpResponseError:NEWLINE """NEWLINE polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]NEWLINE cls = kwargs.pop('cls', None) # type: ClsType[None]NEWLINE lro_delay = kwargs.pop(NEWLINE 'polling_interval',NEWLINE self._config.polling_intervalNEWLINE )NEWLINE cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]NEWLINE if cont_token is None:NEWLINE raw_result = self._failover_initial(NEWLINE resource_group_name=resource_group_name,NEWLINE account_name=account_name,NEWLINE cls=lambda x,y,z: x,NEWLINE **kwargsNEWLINE )NEWLINENEWLINE kwargs.pop('error_map', None)NEWLINE kwargs.pop('content_type', None)NEWLINENEWLINE def get_long_running_output(pipeline_response):NEWLINE if cls:NEWLINE return cls(pipeline_response, None, {})NEWLINENEWLINE path_format_arguments = {NEWLINE 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),NEWLINE 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),NEWLINE 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),NEWLINE }NEWLINENEWLINE if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)NEWLINE elif polling is False: polling_method = NoPolling()NEWLINE else: polling_method = pollingNEWLINE if cont_token:NEWLINE return LROPoller.from_continuation_token(NEWLINE polling_method=polling_method,NEWLINE continuation_token=cont_token,NEWLINE client=self._client,NEWLINE deserialization_callback=get_long_running_outputNEWLINE )NEWLINE else:NEWLINE return LROPoller(self._client, raw_result, get_long_running_output, polling_method)NEWLINE begin_failover.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignoreNEWLINE |
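The generated operations above are normally reached through the versioned management client rather than instantiated directly. A minimal usage sketch, assuming the `azure-mgmt-storage` and `azure-identity` packages are installed; the subscription ID, resource group, and account name are placeholders:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# list_by_resource_group returns an ItemPaged; iteration follows nextLink paging.
for account in client.storage_accounts.list_by_resource_group("my-rg"):
    print(account.name)

# list_keys is a POST under the hood, as in the operation above; keys are
# deliberately absent from the list response.
keys = client.storage_accounts.list_keys("my-rg", "mystorageacct")

# begin_failover returns an LROPoller; result() blocks until the LRO finishes.
client.storage_accounts.begin_failover("my-rg", "mystorageacct").result()
```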
import unittestNEWLINENEWLINEfrom tests.mapreduce import MapReduceTestCaseNEWLINENEWLINENEWLINEdef all_tests():NEWLINE suite = unittest.TestSuite()NEWLINE suite.addTest(unittest.makeSuite(MapReduceTestCase))NEWLINE return suiteNEWLINE |
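A suite assembled this way can be driven by the standard library runner; a minimal sketch, appended to the module as an entry point:

```python
if __name__ == '__main__':
    # Run the aggregated suite with the stdlib text runner.
    unittest.TextTestRunner(verbosity=2).run(all_tests())
```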
# Copyright 2019 kubeflow.org.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE#     http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom http import HTTPStatusNEWLINENEWLINEimport tornado.ioloopNEWLINEimport tornado.webNEWLINEimport tornado.httpserverNEWLINEimport argparseNEWLINEimport osNEWLINEimport loggingNEWLINEimport jsonNEWLINEfrom enum import EnumNEWLINEfrom kfserving.model import KFModelNEWLINEfrom typing import List, Dict, Optional, AnyNEWLINEfrom kfserving.protocols.request_handler import RequestHandlerNEWLINEfrom kfserving.protocols.tensorflow_http import TensorflowRequestHandlerNEWLINEfrom kfserving.protocols.seldon_http import SeldonRequestHandlerNEWLINENEWLINEDEFAULT_HTTP_PORT = 8080NEWLINEDEFAULT_GRPC_PORT = 8081NEWLINENEWLINENEWLINEclass Protocol(Enum):NEWLINE    tensorflow_http = "tensorflow.http"NEWLINE    seldon_http = "seldon.http"NEWLINENEWLINE    def __str__(self):NEWLINE        return self.valueNEWLINENEWLINENEWLINEparser = argparse.ArgumentParser(add_help=False)NEWLINEparser.add_argument('--http_port', default=DEFAULT_HTTP_PORT, type=int,NEWLINE                    help='The HTTP Port listened to by the model server.')NEWLINEparser.add_argument('--grpc_port', default=DEFAULT_GRPC_PORT, type=int,NEWLINE                    help='The GRPC Port listened to by the model server.')NEWLINEparser.add_argument('--protocol', type=Protocol, choices=list(Protocol),NEWLINE                    default="tensorflow.http",NEWLINE                    help='The protocol served by the model server')NEWLINEargs, _ = parser.parse_known_args()NEWLINENEWLINEKFSERVER_LOGLEVEL = os.environ.get('KFSERVER_LOGLEVEL', 'INFO').upper()NEWLINElogging.basicConfig(level=KFSERVER_LOGLEVEL)NEWLINENEWLINEPREDICTOR_URL_FORMAT = "http://{0}/v1/models/{1}:predict"NEWLINENEWLINENEWLINEclass KFServer(object):NEWLINE    def __init__(self, protocol: Protocol = args.protocol, http_port: int = args.http_port,NEWLINE                 grpc_port: int = args.grpc_port):NEWLINE        self.registered_models: Dict[str, KFModel] = {}NEWLINE        self.http_port = http_portNEWLINE        self.grpc_port = grpc_portNEWLINE        self.protocol = protocolNEWLINE        self._http_server: Optional[tornado.httpserver.HTTPServer] = NoneNEWLINENEWLINE    def create_application(self):NEWLINE        return tornado.web.Application([NEWLINE            # Server Liveness API returns 200 if server is alive.NEWLINE            (r"/", LivenessHandler),NEWLINE            # Protocol Discovery API that returns the serving protocol supported by this server.NEWLINE            (r"/protocol", ProtocolHandler, dict(protocol=self.protocol)),NEWLINE            # Prometheus Metrics API that returns metrics for model serversNEWLINE            (r"/v1/metrics", MetricsHandler, dict(models=self.registered_models)),NEWLINE            # Model Health API returns 200 if model is ready to serve.NEWLINE            (r"/v1/models/([a-zA-Z0-9_-]+)",NEWLINE             ModelHealthHandler, dict(models=self.registered_models)),NEWLINE            # Predict API executes predict on input tensors, via the custom :predict verb for Tensorflow compatibilityNEWLINE            
(r"/v1/models/([a-zA-Z0-9_-]+):predict",NEWLINE ModelPredictHandler, dict(protocol=self.protocol, models=self.registered_models)),NEWLINE (r"/v1/models/([a-zA-Z0-9_-]+):explain",NEWLINE ModelExplainHandler, dict(protocol=self.protocol, models=self.registered_models)),NEWLINE ])NEWLINENEWLINE def start(self, models: List[KFModel] = []):NEWLINE # TODO add a GRPC serverNEWLINE for model in models:NEWLINE self.register_model(model)NEWLINENEWLINE self._http_server = tornado.httpserver.HTTPServer(self.create_application())NEWLINENEWLINE logging.info("Listening on port %s" % self.http_port)NEWLINE self._http_server.bind(self.http_port)NEWLINE self._http_server.start(0) # Forks workers equal to host's coresNEWLINE tornado.ioloop.IOLoop.current().start()NEWLINENEWLINE def register_model(self, model: KFModel):NEWLINE if not model.name:NEWLINE raise Exception("Failed to register model, model.name must be provided.")NEWLINE self.registered_models[model.name] = modelNEWLINE logging.info("Registering model:" + model.name)NEWLINENEWLINENEWLINEdef get_request_handler(protocol, request: Dict) -> RequestHandler:NEWLINE if protocol == Protocol.tensorflow_http:NEWLINE return TensorflowRequestHandler(request)NEWLINE else:NEWLINE return SeldonRequestHandler(request)NEWLINENEWLINENEWLINEclass ModelExplainHandler(tornado.web.RequestHandler):NEWLINENEWLINE def initialize(self, protocol: str, models: Dict[str, KFModel]):NEWLINE self.protocol = protocolNEWLINE self.models = modelsNEWLINENEWLINE def post(self, name: str):NEWLINENEWLINE # TODO Add metricsNEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.NOT_FOUND,NEWLINE reason="Model with name %s does not exist." % nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE if not model.ready:NEWLINE model.load()NEWLINENEWLINE try:NEWLINE body = json.loads(self.request.body)NEWLINE except json.decoder.JSONDecodeError as e:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.BAD_REQUEST,NEWLINE reason="Unrecognized request format: %s" % eNEWLINE )NEWLINENEWLINE request_handler: RequestHandler = get_request_handler(self.protocol, body)NEWLINE request_handler.validate()NEWLINE request = request_handler.extract_request()NEWLINE explanation = model.explain(request)NEWLINENEWLINE self.write(explanation)NEWLINENEWLINENEWLINEclass ModelPredictHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, protocol: str, models: Dict[str, KFModel]):NEWLINE self.protocol = protocolNEWLINE self.models = modelsNEWLINENEWLINE def post(self, name: str):NEWLINE # TODO Add metricsNEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.NOT_FOUND,NEWLINE reason="Model with name %s does not exist." 
% nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE if not model.ready:NEWLINE model.load()NEWLINENEWLINE try:NEWLINE body = json.loads(self.request.body)NEWLINE except json.decoder.JSONDecodeError as e:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.BAD_REQUEST,NEWLINE reason="Unrecognized request format: %s" % eNEWLINE )NEWLINENEWLINE # for predictor this is noopNEWLINE # for transformer the preprocess step transforms the body to request that is conforming to data plane protocolNEWLINE request = model.preprocess(body)NEWLINE # validate if the request to predictor is conforming to data plane protocolNEWLINE request_handler: RequestHandler = get_request_handler(self.protocol, request)NEWLINE request_handler.validate()NEWLINE inputs = request_handler.extract_request()NEWLINE # for predictor this does in-place predictionNEWLINE # for transformer it calls out to predictorNEWLINE results = model.predict(inputs)NEWLINE # for predictor this is noopNEWLINE # for transformer the postprocess step transforms the result to what user expectsNEWLINE outputs = model.postprocess(results)NEWLINE response = request_handler.wrap_response(outputs)NEWLINENEWLINE self.write(response)NEWLINENEWLINENEWLINEclass LivenessHandler(tornado.web.RequestHandler):NEWLINE def get(self):NEWLINE self.write("Alive")NEWLINENEWLINENEWLINEclass ProtocolHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, protocol: Protocol):NEWLINE self.protocol = protocolNEWLINENEWLINE def get(self):NEWLINE self.write(str(self.protocol.value))NEWLINENEWLINENEWLINEclass MetricsHandler(tornado.web.RequestHandler):NEWLINE def get(self):NEWLINE self.write("Not Implemented")NEWLINENEWLINENEWLINEclass ModelHealthHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, models: Dict[str, KFModel]):NEWLINE self.models = modelsNEWLINENEWLINE def get(self, name: str):NEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=404,NEWLINE reason="Model with name %s does not exist." % nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE self.write(json.dumps({NEWLINE "name": model.name,NEWLINE "ready": model.readyNEWLINE }))NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE s = KFServer()NEWLINE s.start()NEWLINE |
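Starting a server like this means registering concrete `KFModel` subclasses. A minimal sketch under stated assumptions: `EchoModel` is a hypothetical stand-in, `KFModel(name)` sets `self.name` and `self.ready` (as `register_model` and the health handler use them), and the request passed to `predict` is the instances list of the default `tensorflow.http` protocol:

```python
from kfserving.model import KFModel

class EchoModel(KFModel):
    # Hypothetical model: marks itself ready on load, returns one constant
    # score per input instance instead of running real inference.
    def load(self):
        self.ready = True

    def predict(self, request):
        return [0.0 for _ in request]

if __name__ == "__main__":
    server = KFServer()
    # Served at /v1/models/echo (health) and /v1/models/echo:predict.
    server.start(models=[EchoModel("echo")])
```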
# by Kami BigdelyNEWLINE# Extract classNEWLINEfrom types import SimpleNamespaceNEWLINENEWLINEWELL_DONE = 3000NEWLINEMEDIUM = 2500NEWLINECOOKED_CONSTANT = 0.05NEWLINENEWLINENEWLINEdef is_cooking_criteria_satisfied(food):NEWLINE    return is_well_done(food) or is_medium(food)NEWLINENEWLINENEWLINEdef is_well_done(food):NEWLINE    return get_cooking_progress(food) >= WELL_DONENEWLINENEWLINENEWLINEdef is_medium(food):NEWLINE    return get_cooking_progress(food) >= MEDIUMNEWLINENEWLINENEWLINEdef get_cooking_progress(food):NEWLINE    return food.time * food.temperature * food.pressure * COOKED_CONSTANTNEWLINENEWLINENEWLINE# The functions above read attributes, so build the steak as an object, not a dict.NEWLINEmy_steak = SimpleNamespace(NEWLINE    time=30,NEWLINE    temperature=103,NEWLINE    pressure=20,NEWLINE    desired_state='well-done',NEWLINE)NEWLINENEWLINEif is_cooking_criteria_satisfied(my_steak):NEWLINE    print('cooking is done.')NEWLINEelse:NEWLINE    print('ongoing cooking.') |
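The header names the Extract Class refactoring; a sketch of where it leads, moving the cooking state and queries onto a `Food` class (illustrative names, same thresholds and constant as above):

```python
class Food:
    COOKED_CONSTANT = 0.05

    def __init__(self, time, temperature, pressure, desired_state):
        self.time = time
        self.temperature = temperature
        self.pressure = pressure
        self.desired_state = desired_state

    def cooking_progress(self):
        return self.time * self.temperature * self.pressure * self.COOKED_CONSTANT

    def is_well_done(self):
        return self.cooking_progress() >= WELL_DONE

    def is_medium(self):
        return self.cooking_progress() >= MEDIUM

my_steak = Food(time=30, temperature=103, pressure=20, desired_state='well-done')
print('cooking is done.' if my_steak.is_well_done() or my_steak.is_medium()
      else 'ongoing cooking.')
```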
# Copyright 2019 The Vitess Authors.NEWLINE# NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE# NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE# NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""Kubernetes environment."""NEWLINENEWLINEimport getpassNEWLINEimport jsonNEWLINEimport loggingNEWLINEimport osNEWLINEimport subprocessNEWLINEimport timeNEWLINENEWLINEfrom sandbox import kubernetes_componentsNEWLINEfrom vtproto import topodata_pb2NEWLINEfrom vtdb import vtgate_clientNEWLINEimport base_environmentNEWLINEimport protocols_flavorNEWLINEimport vtctl_helperNEWLINENEWLINENEWLINEclass K8sEnvironment(base_environment.BaseEnvironment):NEWLINE """Environment for kubernetes clusters on Google Compute Engine."""NEWLINENEWLINE def __init__(self):NEWLINE super(K8sEnvironment, self).__init__()NEWLINENEWLINE def use_named(self, instance_name):NEWLINE # Check to make sure kubectl existsNEWLINE try:NEWLINE subprocess.check_output(['kubectl'])NEWLINE except OSError:NEWLINE raise base_environment.VitessEnvironmentError(NEWLINE 'kubectl not found, please install by visiting kubernetes.io or 'NEWLINE 'running gcloud components update kubectl if using compute engine.')NEWLINENEWLINE vtctld_ip = kubernetes_components.get_forwarded_ip(NEWLINE 'vtctld', instance_name)NEWLINE self.vtctl_addr = '%s:15999' % vtctld_ipNEWLINENEWLINE self.vtctl_helper = vtctl_helper.VtctlHelper('grpc', self.vtctl_addr)NEWLINE self.cluster_name = instance_nameNEWLINENEWLINE keyspaces = self.vtctl_helper.execute_vtctl_command(['GetKeyspaces'])NEWLINE self.mobs = filter(None, keyspaces.split('\n'))NEWLINE self.keyspaces = self.mobsNEWLINENEWLINE if not self.keyspaces:NEWLINE raise base_environment.VitessEnvironmentError(NEWLINE 'Invalid environment, no keyspaces found')NEWLINENEWLINE self.num_shards = []NEWLINE self.shards = []NEWLINENEWLINE for keyspace in self.keyspaces:NEWLINE shards = json.loads(self.vtctl_helper.execute_vtctl_command(NEWLINE ['FindAllShardsInKeyspace', keyspace]))NEWLINE self.shards.append(shards)NEWLINE self.num_shards.append(len(shards))NEWLINENEWLINE # This assumes that all keyspaces use the same set of cellsNEWLINE self.cells = json.loads(self.vtctl_helper.execute_vtctl_command(NEWLINE ['GetShard', '%s/%s' % (self.keyspaces[0], self.shards[0].keys()[0])]NEWLINE ))['cells']NEWLINENEWLINE self.primary_cells = self.cellsNEWLINE self.replica_instances = []NEWLINE self.rdonly_instances = []NEWLINENEWLINE # This assumes that all cells are equivalent for k8s environments.NEWLINE all_tablets_in_a_cell = self.vtctl_helper.execute_vtctl_command(NEWLINE ['ListAllTablets', self.cells[0]])NEWLINE all_tablets_in_a_cell = [x.split(' ') for x inNEWLINE filter(None, all_tablets_in_a_cell.split('\n'))]NEWLINENEWLINE for index, keyspace in enumerate(self.keyspaces):NEWLINE keyspace_tablets_in_cell = [NEWLINE tablet for tablet in all_tablets_in_a_cell if tablet[1] == keyspace]NEWLINE replica_tablets_in_cell = [NEWLINE tablet for tablet in keyspace_tablets_in_cellNEWLINE if tablet[3] == 'master' or tablet[3] == 'replica']NEWLINE replica_instances = 
len(replica_tablets_in_cell) / self.num_shards[index]NEWLINE self.replica_instances.append(replica_instances)NEWLINE self.rdonly_instances.append(NEWLINE (len(keyspace_tablets_in_cell) / self.num_shards[index]) -NEWLINE replica_instances)NEWLINENEWLINE # Converts keyspace name and alias to number of instancesNEWLINE self.keyspace_alias_to_num_instances_dict = {}NEWLINE for index, keyspace in enumerate(self.keyspaces):NEWLINE self.keyspace_alias_to_num_instances_dict[keyspace] = {NEWLINE 'replica': int(self.replica_instances[index]),NEWLINE 'rdonly': int(self.rdonly_instances[index])NEWLINE }NEWLINENEWLINE self.vtgate_addrs = {}NEWLINE for cell in self.cells:NEWLINE vtgate_ip = kubernetes_components.get_forwarded_ip(NEWLINE 'vtgate-%s' % cell, instance_name)NEWLINE self.vtgate_addrs[cell] = '%s:15991' % vtgate_ipNEWLINE super(K8sEnvironment, self).use_named(instance_name)NEWLINENEWLINE def create(self, **kwargs):NEWLINE self.create_gke_cluster = (NEWLINE kwargs.get('create_gke_cluster', 'false').lower() != 'false')NEWLINE if self.create_gke_cluster and 'GKE_NUM_NODES' not in kwargs:NEWLINE raise base_environment.VitessEnvironmentError(NEWLINE 'Must specify GKE_NUM_NODES')NEWLINE if 'GKE_CLUSTER_NAME' not in kwargs:NEWLINE kwargs['GKE_CLUSTER_NAME'] = getpass.getuser()NEWLINE if 'VITESS_NAME' not in kwargs:NEWLINE kwargs['VITESS_NAME'] = getpass.getuser()NEWLINE kwargs['TEST_MODE'] = '1'NEWLINE self.script_dir = os.path.join(os.environ['VTROOT'], 'examples/kubernetes')NEWLINE try:NEWLINE subprocess.check_output(['gcloud', 'config', 'list'])NEWLINE except OSError:NEWLINE raise base_environment.VitessEnvironmentError(NEWLINE 'gcloud not found, please install by visiting cloud.google.com')NEWLINE if 'project' in kwargs:NEWLINE logging.info('Setting project to %s', kwargs['project'])NEWLINE subprocess.check_output(NEWLINE ['gcloud', 'config', 'set', 'project', kwargs['project']])NEWLINE project_name_json = json.loads(subprocess.check_output(NEWLINE ['gcloud', 'config', 'list', 'project', '--format', 'json']))NEWLINE project_name = project_name_json['core']['project']NEWLINE logging.info('Current project name: %s', project_name)NEWLINE for k, v in kwargs.iteritems():NEWLINE os.environ[k] = vNEWLINE if self.create_gke_cluster:NEWLINE cluster_up_txt = subprocess.check_output(NEWLINE [os.path.join(self.script_dir, 'cluster-up.sh')],NEWLINE cwd=self.script_dir, stderr=subprocess.STDOUT)NEWLINE logging.info(cluster_up_txt)NEWLINE vitess_up_output = subprocess.check_output(NEWLINE [os.path.join(self.script_dir, 'vitess-up.sh')],NEWLINE cwd=self.script_dir, stderr=subprocess.STDOUT)NEWLINE logging.info(vitess_up_output)NEWLINE self.use_named(kwargs['VITESS_NAME'])NEWLINENEWLINE def destroy(self):NEWLINE vitess_down_output = subprocess.check_output(NEWLINE [os.path.join(self.script_dir, 'vitess-down.sh')],NEWLINE cwd=self.script_dir, stderr=subprocess.STDOUT)NEWLINE logging.info(vitess_down_output)NEWLINE if self.create_gke_cluster:NEWLINE cluster_down_output = subprocess.check_output(NEWLINE [os.path.join(self.script_dir, 'cluster-down.sh')],NEWLINE cwd=self.script_dir, stderr=subprocess.STDOUT)NEWLINE logging.info(cluster_down_output)NEWLINENEWLINE def get_vtgate_conn(self, cell):NEWLINE return vtgate_client.connect(NEWLINE protocols_flavor.protocols_flavor().vtgate_python_protocol(),NEWLINE self.vtgate_addrs[cell], 60)NEWLINENEWLINE def restart_mysql_task(self, tablet_name, task_name, is_alloc=False):NEWLINE # Delete the whole pod, which deletes mysql + vttablet tasks.NEWLINE os.system('kubectl delete 
pod %s --namespace=%s' % (NEWLINE self.get_tablet_pod_name(tablet_name), self.cluster_name))NEWLINE return 0NEWLINENEWLINE def wait_for_good_failover_status(NEWLINE self, keyspace, shard_name, failover_completion_timeout_s=60):NEWLINE return 0NEWLINENEWLINE def poll_for_varz(self, tablet_name, varz, timeout=60.0,NEWLINE condition_fn=None, converter=str, condition_msg=None):NEWLINE """Polls for varz to exist, or match specific conditions, within a timeout.NEWLINENEWLINE Args:NEWLINE tablet_name: the name of the process that we're trying to poll vars from.NEWLINE varz: name of the vars to fetch from varzNEWLINE timeout: number of seconds that we should attempt to poll for.NEWLINE condition_fn: a function that takes the var as input, and returns a truthyNEWLINE value if it matches the success conditions.NEWLINE converter: function to convert varz valueNEWLINE condition_msg: string describing the conditions that we're polling for,NEWLINE used for error messaging.NEWLINENEWLINE Raises:NEWLINE VitessEnvironmentError: Raised if the varz conditions aren't met withinNEWLINE the given timeout.NEWLINENEWLINE Returns:NEWLINE dict of requested varz.NEWLINE """NEWLINE start_time = time.time()NEWLINE while True:NEWLINE if (time.time() - start_time) >= timeout:NEWLINE timeout_error_msg = 'Timed out polling for varz.'NEWLINE if condition_fn and condition_msg:NEWLINE timeout_error_msg += ' Condition "%s" not met.' % condition_msgNEWLINE raise base_environment.VitessEnvironmentError(timeout_error_msg)NEWLINE hostname = self.get_tablet_ip_port(tablet_name)NEWLINE host_varz = subprocess.check_output([NEWLINE 'kubectl', 'exec', '-ti', self.get_tablet_pod_name(tablet_name),NEWLINE '--namespace=%s' % self.cluster_name,NEWLINE 'curl', '%s/debug/vars' % hostname])NEWLINE if not host_varz:NEWLINE continueNEWLINE host_varz = json.loads(host_varz)NEWLINE if condition_fn is None or condition_fn(host_varz):NEWLINE return host_varzNEWLINENEWLINE def wait_for_healthy_tablets(self):NEWLINE return 0NEWLINENEWLINE def get_tablet_pod_name(self, tablet_name):NEWLINE tablet_info = json.loads(self.vtctl_helper.execute_vtctl_command(NEWLINE ['GetTablet', tablet_name]))NEWLINE # Hostname is <pod_name>.vttabletNEWLINE return tablet_info['hostname'].split('.')[0]NEWLINENEWLINE def get_tablet_task_number(self, tablet_name):NEWLINE # Tablet pod name under StatefulSet isNEWLINE # "<cell>-<keyspace>-<shard_number>-<tablet_type>-<task_number>"NEWLINE # Example: test1-foo-0-replica-0.NEWLINE return int(self.get_tablet_pod_name(tablet_name).split('-')[-1])NEWLINENEWLINE def automatic_reparent_available(self):NEWLINE """Checks if the environment can automatically reparent."""NEWLINE p1 = subprocess.Popen(NEWLINE ['kubectl', 'get', 'pods', '--namespace=%s' % self.cluster_name],NEWLINE stdout=subprocess.PIPE)NEWLINE p2 = subprocess.Popen(NEWLINE ['grep', 'orchestrator'], stdin=p1.stdout, stdout=subprocess.PIPE)NEWLINE output = p2.communicate()[0]NEWLINE return bool(output)NEWLINENEWLINE def internal_reparent(self, keyspace, shard_name, new_master_uid,NEWLINE emergency=False):NEWLINE reparent_command = (NEWLINE 'EmergencyReparentShard' if emergency else 'PlannedReparentShard')NEWLINE self.vtctl_helper.execute_vtctl_command(NEWLINE [reparent_command, '-keyspace_shard', '%s/%s' % (keyspace, shard_name),NEWLINE '-new_master', new_master_uid])NEWLINE self.vtctl_helper.execute_vtctl_command(['RebuildKeyspaceGraph', keyspace])NEWLINE return 0, 'No output'NEWLINENEWLINE def backup(self, tablet_name):NEWLINE logging.info('Backing up tablet %s', 
tablet_name)NEWLINE self.vtctl_helper.execute_vtctl_command(['Backup', tablet_name])NEWLINENEWLINE def drain_tablet(self, tablet_name, duration_s=600):NEWLINE self.vtctl_helper.execute_vtctl_command(['StopSlave', tablet_name])NEWLINE self.vtctl_helper.execute_vtctl_command(NEWLINE ['ChangeSlaveType', tablet_name, 'drained'])NEWLINENEWLINE def is_tablet_drained(self, tablet_name):NEWLINE return self.get_tablet_type(tablet_name) == topodata_pb2.DRAINEDNEWLINENEWLINE def undrain_tablet(self, tablet_name):NEWLINE self.vtctl_helper.execute_vtctl_command(NEWLINE ['ChangeSlaveType', tablet_name, 'replica'])NEWLINE self.vtctl_helper.execute_vtctl_command(['StartSlave', tablet_name])NEWLINENEWLINE def is_tablet_undrained(self, tablet_name):NEWLINE return not self.is_tablet_drained(tablet_name)NEWLINENEWLINE def get_tablet_query_total_count(self, tablet_name):NEWLINE return self.poll_for_varz(NEWLINE tablet_name, ['Queries'])['Queries']['TotalCount']NEWLINENEWLINE |
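Most of the health checks above funnel through `poll_for_varz`; a hypothetical call that blocks until a tablet has served at least one query (the instance name and tablet alias are placeholders following the patterns used in this file):

```python
env = K8sEnvironment()
env.use_named('my-vitess-instance')  # placeholder cluster name
env.poll_for_varz(
    'test1-0000000100',              # placeholder tablet alias
    ['Queries'],
    timeout=120.0,
    condition_fn=lambda varz: varz['Queries']['TotalCount'] > 0,
    condition_msg='tablet has served at least one query')
```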
'''NEWLINEWSGI config for server project.NEWLINENEWLINEIt exposes the WSGI callable as a module-level variable named ``application``.NEWLINENEWLINEFor more information on this file, seeNEWLINEhttps://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/NEWLINE'''NEWLINENEWLINEimport osNEWLINENEWLINEfrom django.core.wsgi import get_wsgi_applicationNEWLINENEWLINEos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'server.settings')NEWLINENEWLINEapplication = get_wsgi_application()NEWLINE |
import osNEWLINEfrom pathlib import PathNEWLINEimport reNEWLINEimport dbusNEWLINEimport psutilNEWLINEfrom .bash import exec_bash, BashErrorNEWLINEfrom .log_utils import get_loggerNEWLINENEWLINENEWLINEclass CheckError(Exception):NEWLINE    passNEWLINENEWLINENEWLINEdef is_ac_power_connected():NEWLINENEWLINE    for power_source_path in Path("/sys/class/power_supply/").iterdir():NEWLINENEWLINE        try:NEWLINENEWLINE            with open(power_source_path / "type", 'r') as f:NEWLINE                if f.read().strip() != "Mains":NEWLINE                    continueNEWLINENEWLINE            with open(power_source_path / "online", 'r') as f:NEWLINE                if f.read(1) == "1":NEWLINE                    return TrueNEWLINENEWLINE        except IOError:NEWLINE            continueNEWLINENEWLINE    return FalseNEWLINENEWLINENEWLINEdef is_pat_available():NEWLINE    try:NEWLINE        exec_bash("grep -E '^flags.+ pat( |$)' /proc/cpuinfo")NEWLINE        return TrueNEWLINE    except BashError:NEWLINE        return FalseNEWLINENEWLINENEWLINEdef get_active_renderer():NEWLINENEWLINE    if _is_gl_provider_nvidia():NEWLINE        return "nvidia"NEWLINE    else:NEWLINE        return "integrated"NEWLINENEWLINENEWLINEdef is_module_available(module_name):NEWLINENEWLINE    logger = get_logger()NEWLINENEWLINE    try:NEWLINE        exec_bash("/usr/sbin/modinfo %s" % module_name)NEWLINE    except BashError:NEWLINE        logger.info("modinfo %s failed: module not available" % module_name)NEWLINE        return FalseNEWLINE    else:NEWLINE        logger.info("modinfo %s succeeded: module available" % module_name)NEWLINE        return TrueNEWLINENEWLINEdef is_module_loaded_available(module_name):NEWLINENEWLINE    logger = get_logger()NEWLINENEWLINE    try:NEWLINE        exec_bash("/usr/sbin/lsmod | grep %s" % module_name)NEWLINE    except BashError:NEWLINE        logger.warning("module %s not found in lsmod output" % module_name)NEWLINE        return FalseNEWLINE    else:NEWLINE        logger.warning("module %s found in lsmod output" % module_name)NEWLINE        return TrueNEWLINENEWLINEdef is_module_loaded(module_name):NEWLINENEWLINE    try:NEWLINE        exec_bash("/usr/sbin/lsmod | grep -E \"^%s \"" % module_name)NEWLINE    except BashError:NEWLINE        return FalseNEWLINE    else:NEWLINE        return TrueNEWLINENEWLINEdef get_current_display_manager():NEWLINENEWLINE    if not os.path.isfile("/etc/systemd/system/display-manager.service"):NEWLINE        raise CheckError("No display-manager.service file found")NEWLINENEWLINE    dm_service_path = os.path.realpath("/etc/systemd/system/display-manager.service")NEWLINE    dm_service_filename = os.path.split(dm_service_path)[-1]NEWLINE    dm_name = os.path.splitext(dm_service_filename)[0]NEWLINENEWLINE    return dm_nameNEWLINENEWLINEdef _get_openrc_display_manager(init):NEWLINENEWLINE    if init != "openrc":NEWLINE        return using_patched_GDM()NEWLINENEWLINE    if not os.path.isfile("/etc/init.d/xdm"):NEWLINE        raise CheckError("No xdm init script file found")NEWLINENEWLINE    dm_service_path = os.path.realpath("/etc/init.d/xdm")NEWLINE    dm_service_filename = os.path.split(dm_service_path)[-1]NEWLINE    dm_name = os.path.splitext(dm_service_filename)[0]NEWLINENEWLINE    return dm_nameNEWLINENEWLINEdef using_patched_GDM():NEWLINENEWLINE    folder_path_1 = "/etc/gdm/Prime"NEWLINE    folder_path_2 = "/etc/gdm3/Prime"NEWLINENEWLINE    return os.path.isdir(folder_path_1) or os.path.isdir(folder_path_2)NEWLINENEWLINEdef check_offloading_available():NEWLINENEWLINE    try:NEWLINE        out = exec_bash("/usr/bin/xrandr --listproviders")NEWLINE    except BashError as e:NEWLINE        raise CheckError("Cannot list xrandr providers: %s" % str(e))NEWLINENEWLINE    for line in out.splitlines():NEWLINE        if re.search("^Provider [0-9]+:", line) and "name:NVIDIA-G0" in line:NEWLINE            return TrueNEWLINE    return FalseNEWLINENEWLINEdef get_integrated_provider():NEWLINENEWLINE    try:NEWLINE        provider = exec_bash("xrandr --listproviders | egrep -io \"name:.*AMD.*|name:.*Intel.*\" | sed 's/name://;s/^/\"/;s/$/\"/'")NEWLINE    except BashError as e:NEWLINE        raise CheckError("Cannot find Intel or AMD in xrandr providers: %s" % str(e))NEWLINE    return providerNEWLINENEWLINEdef is_xorg_intel_module_available():NEWLINE    return os.path.isfile("/usr/lib64/xorg/modules/drivers/intel_drv.so")NEWLINENEWLINEdef is_xorg_amdgpu_module_available():NEWLINE    return os.path.isfile("/usr/lib64/xorg/modules/drivers/amdgpu_drv.so")NEWLINENEWLINENEWLINEdef is_login_manager_active():NEWLINE    return _is_service_active("display-manager")NEWLINENEWLINENEWLINEdef is_daemon_active():NEWLINE    return _is_service_active("optimus-manager")NEWLINENEWLINENEWLINEdef is_bumblebeed_service_active():NEWLINE    return _is_service_active("bumblebeed")NEWLINENEWLINEdef list_processes_on_nvidia(bus_ids):NEWLINENEWLINE    nvidia_id = bus_ids["nvidia"]NEWLINENEWLINE    paths = [NEWLINE        "/dev/nvidia",NEWLINE        os.path.realpath(f"/dev/dri/by-path/pci-0000:{nvidia_id}-card"),NEWLINE        os.path.realpath(f"/dev/dri/by-path/pci-0000:{nvidia_id}-render")NEWLINE    ]NEWLINENEWLINE    def _check_holds_nvidia(pid):NEWLINENEWLINE        for fd_path in Path(f"/proc/{pid}/fd").iterdir():NEWLINE            try:NEWLINE                target = os.readlink(fd_path)NEWLINE                for p in paths:NEWLINE                    if p in target:NEWLINE                        return TrueNEWLINE            except FileNotFoundError:NEWLINE                passNEWLINENEWLINE        return FalseNEWLINENEWLINE    processes = []NEWLINENEWLINE    for proc in psutil.process_iter(["pid", "cmdline"]):NEWLINE        try:NEWLINE            if _check_holds_nvidia(proc.pid):NEWLINE                cmdline = proc.cmdline()NEWLINE                cmdline = cmdline[0] if len(cmdline) > 0 else ""NEWLINE                processes.append({NEWLINE                    "pid": proc.pid,NEWLINE                    "cmdline": cmdlineNEWLINE                })NEWLINE        except PermissionError:NEWLINE            passNEWLINENEWLINE    return processesNEWLINENEWLINENEWLINEdef _is_gl_provider_nvidia():NEWLINENEWLINE    try:NEWLINE        out = exec_bash("__NV_PRIME_RENDER_OFFLOAD=0 glxinfo")NEWLINE    except BashError as e:NEWLINE        raise CheckError("Cannot run glxinfo: %s" % str(e))NEWLINENEWLINE    for line in out.splitlines():NEWLINE        if "server glx vendor string: NVIDIA Corporation" in line:NEWLINE            return TrueNEWLINE    return FalseNEWLINENEWLINEdef get_integrated_gpu():NEWLINENEWLINE    try:NEWLINE        exec_bash("glxinfo | awk '/Vendor:/{print $2}' | grep 'X.Org'")NEWLINE        return "amd"NEWLINE    except BashError:NEWLINE        return "intel"NEWLINENEWLINEdef _is_service_active(service_name):NEWLINENEWLINE    logger = get_logger()NEWLINENEWLINE    try:NEWLINE        system_bus = dbus.SystemBus()NEWLINE    except dbus.exceptions.DBusException:NEWLINE        logger.warning(NEWLINE            "Cannot communicate with the DBus system bus to check status of %s."NEWLINE            " Is DBus running? Falling back to bash commands", service_name)NEWLINE        return _is_service_active_bash(service_name)NEWLINE    else:NEWLINE        return _is_service_active_dbus(system_bus, service_name)NEWLINENEWLINEdef _is_service_active_dbus(system_bus, service_name):NEWLINENEWLINE    systemd = system_bus.get_object("org.freedesktop.systemd1", "/org/freedesktop/systemd1")NEWLINENEWLINE    try:NEWLINE        unit_path = systemd.GetUnit("%s.service" % service_name, dbus_interface="org.freedesktop.systemd1.Manager")NEWLINE    except dbus.exceptions.DBusException:NEWLINE        return FalseNEWLINENEWLINE    optimus_manager_interface = system_bus.get_object("org.freedesktop.systemd1", unit_path)NEWLINE    properties_manager = dbus.Interface(optimus_manager_interface, 'org.freedesktop.DBus.Properties')NEWLINE    state = properties_manager.Get("org.freedesktop.systemd1.Unit", "SubState")NEWLINENEWLINE    return state == "running"NEWLINENEWLINENEWLINEdef _is_service_active_bash(service_name):NEWLINENEWLINE    try:NEWLINE        exec_bash("systemctl is-active %s" % service_name)NEWLINE    except BashError:NEWLINE        return FalseNEWLINE    else:NEWLINE        return TrueNEWLINE |
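For the process listing above, `bus_ids` maps a GPU name to its PCI `bus:device.function` triplet, matching the `/dev/dri/by-path` links the function resolves. A hypothetical call:

```python
# "01:00.0" is a placeholder PCI ID for the NVIDIA card.
for proc in list_processes_on_nvidia({"nvidia": "01:00.0"}):
    print(proc["pid"], proc["cmdline"])
```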
# -*- coding: utf-8 -*-NEWLINENEWLINE"""NEWLINEtaiga_ncurses.ui.signalsNEWLINE~~~~~~~~~~~~~~~~~~~~~~~~NEWLINE"""NEWLINENEWLINEimport urwidNEWLINENEWLINEconnect = urwid.connect_signalNEWLINEdisconnect = urwid.disconnect_signalNEWLINENEWLINEdef emit(widget, signal):NEWLINE widget._emit(signal)NEWLINE |
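A minimal sketch of the aliases at a call site, using urwid's built-in `Button`, which emits a `'click'` signal:

```python
import urwid
from taiga_ncurses.ui import signals

button = urwid.Button("ok")

def on_click(widget):
    print("clicked")

signals.connect(button, "click", on_click)
signals.emit(button, "click")  # invokes on_click through widget._emit
```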
from django.core.cache import get_cacheNEWLINEfrom avocado.conf import settingsNEWLINEfrom .model import instance_cache_key, NEVER_EXPIRENEWLINENEWLINENEWLINEdef post_save_cache(sender, instance, **kwargs):NEWLINE    """General post-save handler for caching model instances. NOTE: This mustNEWLINE    be used in conjunction with the `pre_delete_uncache` since the cache is setNEWLINE    to never expire.NEWLINE    """NEWLINE    cache = get_cache(settings.DATA_CACHE)NEWLINE    cache.set(instance_cache_key(instance), instance, timeout=NEVER_EXPIRE)NEWLINENEWLINENEWLINEdef pre_delete_uncache(sender, instance, **kwargs):NEWLINE    "General pre-delete handler for removing the cache for model instances."NEWLINE    cache = get_cache(settings.DATA_CACHE)NEWLINE    cache.delete(instance_cache_key(instance))NEWLINE |
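These handlers are meant to be connected to a model's signals so the pair stays in sync, as the docstring warns. A minimal sketch; `SomeModel` is a placeholder for any cached model class:

```python
from django.db.models.signals import post_save, pre_delete

# SomeModel stands in for any model whose instances should be cached.
post_save.connect(post_save_cache, sender=SomeModel)
pre_delete.connect(pre_delete_uncache, sender=SomeModel)
```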
try:NEWLINE    from django.conf.urls import url, includeNEWLINEexcept ImportError:NEWLINE    # Django >= 4.0 removed django.conf.urls.url; re_path is the equivalent.NEWLINE    from django.urls import re_path as url, includeNEWLINENEWLINEurlpatterns = [NEWLINE    url(r'^', include('notify.urls', namespace='notifications')),NEWLINE]NEWLINE |
from __future__ import print_function, absolute_import, divisionNEWLINENEWLINEfrom timeit import default_timer as timerNEWLINEimport numpy as npNEWLINENEWLINEfrom .reduction import device_reduce_sumNEWLINENEWLINENEWLINEdef benchmark_intp(nelem):NEWLINE    data = np.random.randint(0, 100, nelem).astype(np.intp)NEWLINENEWLINE    ts = timer()NEWLINE    expected_res = data.sum()NEWLINE    cpu_time = timer() - tsNEWLINENEWLINE    ts = timer()NEWLINE    got_res = device_reduce_sum(data)NEWLINE    gpu_time = timer() - tsNEWLINENEWLINE    assert got_res == expected_resNEWLINE    return cpu_time, gpu_timeNEWLINENEWLINENEWLINEdef benchmark_float64(nelem):NEWLINE    data = np.random.random(nelem).astype(np.float64)NEWLINENEWLINE    ts = timer()NEWLINE    expected_res = data.sum()NEWLINE    cpu_time = timer() - tsNEWLINENEWLINE    ts = timer()NEWLINE    got_res = device_reduce_sum(data)NEWLINE    gpu_time = timer() - tsNEWLINENEWLINE    assert np.allclose(got_res, expected_res)NEWLINE    return cpu_time, gpu_timeNEWLINENEWLINENEWLINEdef main():NEWLINE    print('benchmark intp'.center(80, '='))NEWLINE    for n in [100, 1000, 10000, 100000, 1000000, 10000000]:NEWLINE        print('n = {0}'.format(n))NEWLINE        for t in range(3):NEWLINE            print(benchmark_intp(n))NEWLINENEWLINE    print('benchmark float64'.center(80, '='))NEWLINE    for n in [100, 1000, 10000, 100000, 1000000, 10000000]:NEWLINE        print('n = {0}'.format(n))NEWLINE        for t in range(3):NEWLINE            print(benchmark_float64(n))NEWLINENEWLINE    # Note: On Carrizo, speedup is attained at n=1,000,000NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    main()NEWLINE |
# Write a program that reads a person's full name, then shows the first and the last name separately.NEWLINE# E.g.: Ana Maria de SouzaNEWLINE# first = AnaNEWLINE# last = SouzaNEWLINEnome = str(input('Enter your name: ')).strip()NEWLINEs = nome.split()NEWLINEprint('Your first name is {}'.format(s[0]))NEWLINE# print('Your last name is {}'.format(s[len(s) - 1]))NEWLINEprint('Your last name is {}'.format(s[-1]))NEWLINE |
# model settingsNEWLINEmodel = dict(NEWLINE type="RPN",NEWLINE pretrained="open-mmlab://resnet50_caffe",NEWLINE backbone=dict(NEWLINE type="ResNet",NEWLINE depth=50,NEWLINE num_stages=4,NEWLINE out_indices=(0, 1, 2, 3),NEWLINE frozen_stages=1,NEWLINE norm_cfg=dict(type="BN", requires_grad=False),NEWLINE norm_eval=True,NEWLINE style="caffe",NEWLINE ),NEWLINE neck=dict(NEWLINE type="FPN", in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5NEWLINE ),NEWLINE rpn_head=dict(NEWLINE type="GARPNHead",NEWLINE in_channels=256,NEWLINE feat_channels=256,NEWLINE octave_base_scale=8,NEWLINE scales_per_octave=3,NEWLINE octave_ratios=[0.5, 1.0, 2.0],NEWLINE anchor_strides=[4, 8, 16, 32, 64],NEWLINE anchor_base_sizes=None,NEWLINE anchoring_means=[0.0, 0.0, 0.0, 0.0],NEWLINE anchoring_stds=[0.07, 0.07, 0.14, 0.14],NEWLINE target_means=(0.0, 0.0, 0.0, 0.0),NEWLINE target_stds=[0.07, 0.07, 0.11, 0.11],NEWLINE loc_filter_thr=0.01,NEWLINE loss_loc=dict(NEWLINE type="FocalLoss", use_sigmoid=True, gamma=2.0, alpha=0.25, loss_weight=1.0NEWLINE ),NEWLINE loss_shape=dict(type="BoundedIoULoss", beta=0.2, loss_weight=1.0),NEWLINE loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=True, loss_weight=1.0),NEWLINE loss_bbox=dict(type="SmoothL1Loss", beta=1.0, loss_weight=1.0),NEWLINE ),NEWLINE)NEWLINE# model training and testing settingsNEWLINEtrain_cfg = dict(NEWLINE rpn=dict(NEWLINE ga_assigner=dict(NEWLINE type="ApproxMaxIoUAssigner",NEWLINE pos_iou_thr=0.7,NEWLINE neg_iou_thr=0.3,NEWLINE min_pos_iou=0.3,NEWLINE ignore_iof_thr=-1,NEWLINE ),NEWLINE ga_sampler=dict(NEWLINE type="RandomSampler",NEWLINE num=256,NEWLINE pos_fraction=0.5,NEWLINE neg_pos_ub=-1,NEWLINE add_gt_as_proposals=False,NEWLINE ),NEWLINE assigner=dict(NEWLINE type="MaxIoUAssigner",NEWLINE pos_iou_thr=0.7,NEWLINE neg_iou_thr=0.3,NEWLINE min_pos_iou=0.3,NEWLINE ignore_iof_thr=-1,NEWLINE ),NEWLINE sampler=dict(NEWLINE type="RandomSampler",NEWLINE num=256,NEWLINE pos_fraction=0.5,NEWLINE neg_pos_ub=-1,NEWLINE add_gt_as_proposals=False,NEWLINE ),NEWLINE allowed_border=-1,NEWLINE pos_weight=-1,NEWLINE center_ratio=0.2,NEWLINE ignore_ratio=0.5,NEWLINE debug=False,NEWLINE )NEWLINE)NEWLINEtest_cfg = dict(NEWLINE rpn=dict(NEWLINE nms_across_levels=False,NEWLINE nms_pre=2000,NEWLINE nms_post=2000,NEWLINE max_num=2000,NEWLINE nms_thr=0.7,NEWLINE min_bbox_size=0,NEWLINE )NEWLINE)NEWLINE# dataset settingsNEWLINEdataset_type = "CocoDataset"NEWLINEdata_root = "data/coco/"NEWLINEimg_norm_cfg = dict(NEWLINE mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=FalseNEWLINE)NEWLINEtrain_pipeline = [NEWLINE dict(type="LoadImageFromFile"),NEWLINE dict(type="LoadAnnotations", with_bbox=True, with_label=False),NEWLINE dict(type="Resize", img_scale=(1333, 800), keep_ratio=True),NEWLINE dict(type="RandomFlip", flip_ratio=0.5),NEWLINE dict(type="Normalize", **img_norm_cfg),NEWLINE dict(type="Pad", size_divisor=32),NEWLINE dict(type="DefaultFormatBundle"),NEWLINE dict(type="Collect", keys=["img", "gt_bboxes"]),NEWLINE]NEWLINEtest_pipeline = [NEWLINE dict(type="LoadImageFromFile"),NEWLINE dict(NEWLINE type="MultiScaleFlipAug",NEWLINE img_scale=(1333, 800),NEWLINE flip=False,NEWLINE transforms=[NEWLINE dict(type="Resize", keep_ratio=True),NEWLINE dict(type="RandomFlip"),NEWLINE dict(type="Normalize", **img_norm_cfg),NEWLINE dict(type="Pad", size_divisor=32),NEWLINE dict(type="ImageToTensor", keys=["img"]),NEWLINE dict(type="Collect", keys=["img"]),NEWLINE ],NEWLINE ),NEWLINE]NEWLINEdata = dict(NEWLINE imgs_per_gpu=2,NEWLINE workers_per_gpu=2,NEWLINE 
train=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_train2017.json",NEWLINE img_prefix=data_root + "train2017/",NEWLINE pipeline=train_pipeline,NEWLINE ),NEWLINE val=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_val2017.json",NEWLINE img_prefix=data_root + "val2017/",NEWLINE pipeline=test_pipeline,NEWLINE ),NEWLINE test=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_val2017.json",NEWLINE img_prefix=data_root + "val2017/",NEWLINE pipeline=test_pipeline,NEWLINE ),NEWLINE)NEWLINEevaluation = dict(interval=1, metric="proposal_fast")NEWLINE# optimizerNEWLINEoptimizer = dict(type="SGD", lr=0.02, momentum=0.9, weight_decay=0.0001)NEWLINE# runner configsNEWLINEoptimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))NEWLINElr_config = dict(NEWLINE policy="step", warmup="linear", warmup_iters=500, warmup_ratio=1.0 / 3, step=[8, 11]NEWLINE)NEWLINEcheckpoint_config = dict(interval=1)NEWLINE# yapf:disableNEWLINElog_config = dict(NEWLINE interval=50,NEWLINE hooks=[NEWLINE dict(type="TextLoggerHook"),NEWLINE # dict(type='TensorboardLoggerHook')NEWLINE ],NEWLINE)NEWLINE# yapf:enableNEWLINE# runtime settingsNEWLINEtotal_epochs = 12NEWLINEdist_params = dict(backend="nccl")NEWLINElog_level = "INFO"NEWLINEwork_dir = "./work_dirs/ga_rpn_r50_caffe_fpn_1x"NEWLINEload_from = NoneNEWLINEresume_from = NoneNEWLINEworkflow = [("train", 1)]NEWLINE |
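A config file like this is consumed by the mmdetection tooling rather than imported directly; a sketch of the usual pattern for the mmdetection 1.x-era API this config targets (the config path is a placeholder):

```python
from mmcv import Config
from mmdet.models import build_detector

cfg = Config.fromfile('configs/ga_rpn_r50_caffe_fpn_1x.py')  # placeholder path
model = build_detector(cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
```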
"""NEWLINEDataset module for managing text datasets.NEWLINE"""NEWLINE__author__ = 'victor'NEWLINEfrom collections import OrderedDictNEWLINEimport randomNEWLINEimport numpy as npNEWLINENEWLINENEWLINEclass InvalidFieldsException(Exception):NEWLINE passNEWLINENEWLINENEWLINEclass Dataset(object):NEWLINE """NEWLINE Generic Dataset object that encapsulates a list of instances.NEWLINENEWLINE The dataset stores the instances in an ordered dictionary of fields.NEWLINE Each field maps to a list, the ith element of the list for field 'foo' corresponds to the attribute 'foo' for the ith instance in the dataset.NEWLINENEWLINE The dataset object supports indexing, iterating, slicing (eg. for iterating over batches), shuffling,NEWLINE conversion to/from CONLL format, among others.NEWLINENEWLINE Example:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE d = Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David', 'Ellen'], 'SSN': [1, 23, 45, 56, 7890]})NEWLINE print(d) # Dataset(Name, SSN)NEWLINE print(d[2]) # OrderedDict([('SSN', 45), ('Name', 'Carol')])NEWLINE print(d[1:3]) # OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])])NEWLINENEWLINE for e in d:NEWLINE print(e) # OrderedDict([('SSN', 1), ('Name', 'Alice')]) ...NEWLINE """NEWLINENEWLINE def __init__(self, fields):NEWLINE """NEWLINE :param fields: An ordered dictionary in which a key is the name of an attribute and a value is a list of the values of the instances in the dataset.NEWLINENEWLINE :return: A Dataset objectNEWLINE """NEWLINE self.fields = OrderedDict(fields)NEWLINE length = NoneNEWLINE length_field = NoneNEWLINE for name, d in fields.items():NEWLINE if length is None:NEWLINE length = len(d)NEWLINE length_field = nameNEWLINE else:NEWLINE if len(d) != length:NEWLINE raise InvalidFieldsException('field {} has length {} but field {} has length {}'.format(length_field, length, name, len(d)))NEWLINENEWLINE def __len__(self):NEWLINE """NEWLINE :return: The number of instances in the dataset.NEWLINE """NEWLINE if len(self.fields) == 0:NEWLINE return 0NEWLINE return len(self.fields.values()[0])NEWLINENEWLINE def __repr__(self):NEWLINE return "{}({})".format(self.__class__.__name__, ', '.join(self.fields.keys()))NEWLINENEWLINE @classmethodNEWLINE def load_conll(cls, fname):NEWLINE """NEWLINE The CONLL file must have a tab delimited header, for example::NEWLINENEWLINE # description tagsNEWLINE AliceNEWLINE Hello t1NEWLINE my t2NEWLINE name t3NEWLINE is t4NEWLINE alice t5NEWLINENEWLINE BobNEWLINE I'm t1NEWLINE bob t2NEWLINENEWLINE Here, the fields are `description` and `tags`. 
The first instance has the label `Alice` and theNEWLINE description `['Hello', 'my', 'name', 'is', 'alice']` and the tags `['t1', 't2', 't3', 't4', 't5']`.NEWLINE The second instance has the label `Bob` and the description `["I'm", 'bob']` and the tags `['t1', 't2']`.NEWLINENEWLINE :param fname: The CONLL formatted file from which to load the datasetNEWLINENEWLINE :return: loaded Dataset instanceNEWLINE """NEWLINE def process_cache(cache, fields):NEWLINE cache = [l.split() for l in cache if l]NEWLINE if not cache:NEWLINE return NoneNEWLINE fields['label'].append(cache[0][0])NEWLINE instance = {k: [] for k in fields if k != 'label'}NEWLINE for l in cache[1:]:NEWLINE for i, k in enumerate(fields):NEWLINE if k != 'label':NEWLINE instance[k].append(None if l[i] == '-' else l[i])NEWLINE for k, v in instance.items():NEWLINE fields[k].append(v)NEWLINENEWLINE cache = []NEWLINENEWLINE with open(fname) as f:NEWLINE header = f.next().strip().split('\t')NEWLINE header[0] = header[0].lstrip('# ')NEWLINE fields = OrderedDict([(head, []) for head in header])NEWLINE fields['label'] = []NEWLINE for line in f:NEWLINE line = line.strip()NEWLINE if line:NEWLINE cache.append(line)NEWLINE else:NEWLINE # met empty line, process cacheNEWLINE process_cache(cache, fields)NEWLINE cache = []NEWLINE if cache:NEWLINE process_cache(cache, fields)NEWLINE return cls(fields)NEWLINENEWLINE def write_conll(self, fname):NEWLINE """NEWLINE Serializes the dataset in CONLL format to fnameNEWLINE """NEWLINE if 'label' not in self.fields:NEWLINE raise InvalidFieldsException("dataset is not in CONLL format: missing label field")NEWLINENEWLINE def instance_to_conll(inst):NEWLINE tab = [v for k, v in inst.items() if k != 'label']NEWLINE return '{}\n{}'.format(inst['label'], '\n'.join(['\t'.join(['-' if e is None else str(e) for e in row]) for row in zip(*tab)]))NEWLINENEWLINE with open(fname, 'wb') as f:NEWLINE f.write('# {}'.format('\t'.join([k for k in self.fields if k != 'label'])))NEWLINE for i, d in enumerate(self):NEWLINE f.write('\n{}'.format(instance_to_conll(d)))NEWLINE if i != len(self) - 1:NEWLINE f.write('\n')NEWLINENEWLINE def convert(self, converters, in_place=False):NEWLINE """NEWLINE Applies transformations to the dataset.NEWLINENEWLINE :param converters: A dictionary specifying the function to apply to each field. If a field is missing from the dictionary, then it will not be transformed.NEWLINENEWLINE :param in_place: Whether to perform the transformation in place or create a new dataset instanceNEWLINENEWLINE :return: the transformed dataset instanceNEWLINE """NEWLINE dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()]))NEWLINE for name, convert in converters.items():NEWLINE if name not in self.fields.keys():NEWLINE raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name))NEWLINE for i, d in enumerate(dataset.fields[name]):NEWLINE dataset.fields[name][i] = convert(d)NEWLINE return datasetNEWLINENEWLINE def shuffle(self):NEWLINE """NEWLINE Re-indexes the dataset in random orderNEWLINENEWLINE :return: the shuffled dataset instanceNEWLINE """NEWLINE order = range(len(self))NEWLINE random.shuffle(order)NEWLINE for name, data in self.fields.items():NEWLINE reindexed = []NEWLINE for _, i in enumerate(order):NEWLINE reindexed.append(data[i])NEWLINE self.fields[name] = reindexedNEWLINE return selfNEWLINENEWLINE def __getitem__(self, item):NEWLINE """NEWLINE :param item: An integer index or a slice (eg. 
2, 1:, 1:5)NEWLINENEWLINE :return: an ordered dictionary of the instance(s) at index/indices `item`.NEWLINE """NEWLINE return OrderedDict([(name, data[item]) for name, data in self.fields.items()])NEWLINENEWLINE def __setitem__(self, key, value):NEWLINE """NEWLINE :param key: An integer index or a slice (eg. 2, 1:, 1:5)NEWLINENEWLINE :param value: Sets the instances at index/indices `key` to the instances(s) `value`NEWLINE """NEWLINE for name, data in self.fields.items():NEWLINE if name not in value:NEWLINE raise InvalidFieldsException('field {} is missing in input data: {}'.format(name, value))NEWLINE data[key] = value[name]NEWLINENEWLINE def __iter__(self):NEWLINE """NEWLINE :return: A iterator over the instances in the datasetNEWLINE """NEWLINE for i in xrange(len(self)):NEWLINE yield self[i]NEWLINENEWLINE def copy(self, keep_fields=None):NEWLINE """NEWLINE :param keep_fields: if specified, then only the given fields will be keptNEWLINE :return: A deep copy of the dataset (each instance is copied).NEWLINE """NEWLINE keep_fields = self.fields.keys() or keep_fieldsNEWLINE return self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items() if name in keep_fields]))NEWLINENEWLINE @classmethodNEWLINE def pad(cls, sequences, padding, pad_len=None):NEWLINE """NEWLINE Pads a list of sequences such that they form a matrix.NEWLINENEWLINE :param sequences: a list of sequences of varying lengths.NEWLINE :param padding: the value of padded cells.NEWLINE :param pad_len: the length of the maximum padded sequence.NEWLINE """NEWLINE max_len = max([len(s) for s in sequences])NEWLINE pad_len = pad_len or max_lenNEWLINE assert pad_len >= max_len, 'pad_len {} must be greater or equal to the longest sequence {}'.format(pad_len, max_len)NEWLINE for i, s in enumerate(sequences):NEWLINE sequences[i] = [padding] * (pad_len - len(s)) + sNEWLINE return np.array(sequences)NEWLINE |
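# Hedged illustration of the Dataset helpers defined above (the values here are arbitrary):NEWLINEd = Dataset(OrderedDict([('Name', ['Alice', 'Bob']), ('SSN', [1, 23])]))NEWLINEassert len(d) == 2NEWLINEd2 = d.convert({'SSN': str}) # copies, then stringifies one fieldNEWLINEassert d2[0]['SSN'] == '1'NEWLINE# pad left-pads ragged sequences into a rectangular numpy array:NEWLINEassert Dataset.pad([[1, 2], [3]], padding=0).tolist() == [[1, 2], [0, 3]]NEWLINE |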
from Find_the_last_Fibonacci_digit_hardcore_version_6_kyu import last_fib_digitNEWLINEimport unittestNEWLINENEWLINEclass Fibonacci(unittest.TestCase):NEWLINE def test_1(self):NEWLINE n = 7000006NEWLINE result = 3NEWLINE self.assertEqual(last_fib_digit(n), result)NEWLINE def test_2(self):NEWLINE n = 9000000008NEWLINE result = 4NEWLINE self.assertEqual(last_fib_digit(n), result)NEWLINENEWLINEif __name__ == "__main__":NEWLINE unittest.main() |
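# Hedged reference sketch of the kata solution imported above (the real module is notNEWLINE# shown in this dump): last digits of Fibonacci numbers repeat with the Pisano periodNEWLINE# pi(10) == 60, so only n % 60 matters even for huge n; the starting pair (1, 2)NEWLINE# matches the indexing implied by the two test cases.NEWLINEdef last_fib_digit(n):NEWLINE a, b = 1, 2NEWLINE for _ in range((n - 1) % 60):NEWLINE a, b = b, (a + b) % 10NEWLINE return aNEWLINENEWLINEassert last_fib_digit(7000006) == 3 and last_fib_digit(9000000008) == 4NEWLINE |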
from typing import ListNEWLINENEWLINEfrom bleak.backends.service import BleakGATTServiceNEWLINEfrom bleak.backends.bluezdbus.characteristic import BleakGATTCharacteristicBlueZDBusNEWLINENEWLINENEWLINEclass BleakGATTServiceBlueZDBus(BleakGATTService):NEWLINE """GATT Service implementation for the BlueZ DBus backend"""NEWLINENEWLINE def __init__(self, obj, path):NEWLINE super().__init__(obj)NEWLINE self.__characteristics = []NEWLINE self.__path = pathNEWLINENEWLINE @propertyNEWLINE def uuid(self) -> str:NEWLINE """The UUID of this service"""NEWLINE return self.obj["UUID"]NEWLINENEWLINE @propertyNEWLINE def characteristics(self) -> List[BleakGATTCharacteristicBlueZDBus]:NEWLINE """List of characteristics for this service"""NEWLINE return self.__characteristicsNEWLINENEWLINE def add_characteristic(self, characteristic: BleakGATTCharacteristicBlueZDBus):NEWLINE """Add a :py:class:`~BleakGATTCharacteristicBlueZDBus` to the service.NEWLINENEWLINE Should not be used by end users, but rather by `bleak` itself.NEWLINE """NEWLINE self.__characteristics.append(characteristic)NEWLINENEWLINE @propertyNEWLINE def path(self):NEWLINE """The DBus path. Mostly needed by `bleak`, not by end users"""NEWLINE return self.__pathNEWLINE |
# Generated by Django 2.2.8 on 2019-12-23 10:46NEWLINENEWLINEimport datetimeNEWLINEfrom django.db import migrations, modelsNEWLINEfrom django.utils.timezone import utcNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE initial = TrueNEWLINENEWLINE dependencies = [NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.CreateModel(NEWLINE name='BulletinUpdate',NEWLINE fields=[NEWLINE ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),NEWLINE ('time_of_quake', models.DateTimeField(default=datetime.datetime(1900, 1, 1, 0, 0, tzinfo=utc))),NEWLINE ('url', models.URLField(unique=True)),NEWLINE ('latitude', models.DecimalField(decimal_places=2, max_digits=5)),NEWLINE ('longitude', models.DecimalField(decimal_places=2, max_digits=5)),NEWLINE ('depth', models.DecimalField(decimal_places=1, max_digits=4)),NEWLINE ('magnitude', models.DecimalField(decimal_places=1, max_digits=3)),NEWLINE ('location', models.CharField(default='', max_length=2048)),NEWLINE ],NEWLINE options={NEWLINE 'ordering': ['-time_of_quake'],NEWLINE },NEWLINE ),NEWLINE migrations.AddIndex(NEWLINE model_name='bulletinupdate',NEWLINE index=models.Index(fields=['time_of_quake'], name='bulletin_bu_time_of_9e0643_idx'),NEWLINE ),NEWLINE migrations.AddIndex(NEWLINE model_name='bulletinupdate',NEWLINE index=models.Index(fields=['url'], name='bulletin_bu_url_4b9def_idx'),NEWLINE ),NEWLINE ]NEWLINE |
from functools import wrapsNEWLINENEWLINEfrom flask import Flask, render_template, url_for, redirect, request, session, flash, get_flashed_messages, abort, BlueprintNEWLINEfrom sqlalchemy import create_engine, or_, and_NEWLINEfrom sqlalchemy.orm import sessionmakerNEWLINEfrom werkzeug.security import check_password_hash, generate_password_hashNEWLINENEWLINEfrom db_models import User, BookNEWLINENEWLINENEWLINEapp = Flask(__name__)NEWLINEapp.config.from_object("config.ProductionConfig")NEWLINENEWLINENEWLINEdef redirect_url(default='index'):NEWLINE return request.args.get('next') or \NEWLINE request.referrer or \NEWLINE url_for(default)NEWLINENEWLINENEWLINEdef get_session(echo=False):NEWLINE engine = create_engine('sqlite:///db/database.db', echo=echo)NEWLINE db_session = sessionmaker(bind=engine)NEWLINE return db_session()NEWLINENEWLINENEWLINEdef get_default_context():NEWLINE permission_lvl = get_user_auth_lvl()NEWLINE context = {'permission_lvl': permission_lvl,NEWLINE 'user_id': session.get('user_id'),NEWLINE 'username': session.get('username')}NEWLINE return contextNEWLINENEWLINENEWLINEdef get_user_auth_lvl():NEWLINE if not session.get('user_id'):NEWLINE return 3NEWLINENEWLINE db_session = get_session()NEWLINE user_data = db_session.query(User.permission_lvl) \NEWLINE .filter(User.username == session.get('username')) \NEWLINE .one()NEWLINE return user_data.permission_lvlNEWLINENEWLINENEWLINEdef get_borrow_limit(username):NEWLINE db_session = get_session()NEWLINE user_data = db_session.query(User.book_limit) \NEWLINE .filter(User.username == username) \NEWLINE .one()NEWLINE return user_data.book_limitNEWLINENEWLINENEWLINEdef get_borrowed(username):NEWLINE db_session = get_session()NEWLINE return db_session.query(Book).join(User).filter(User.username == username).all()NEWLINENEWLINENEWLINEdef count_borrowed(username):NEWLINE return len(get_borrowed(username))NEWLINENEWLINENEWLINEdef user_exists(username):NEWLINE db_session = get_session()NEWLINE return db_session.query(User.id).filter_by(username=username).scalar() is not NoneNEWLINENEWLINENEWLINEdef is_able_to_borrow(username):NEWLINE borrow_limit = get_borrow_limit(username)NEWLINE if borrow_limit == 0:NEWLINE return TrueNEWLINE if count_borrowed(username) < borrow_limit:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINEdef is_borrowed(book_id):NEWLINE db_session = get_session()NEWLINE result = db_session.query(Book) \NEWLINE .filter(and_(Book.id == book_id, Book.user_id == None)) \NEWLINE .scalar()NEWLINENEWLINE # no un-borrowed row with this id means the book is currently checked outNEWLINE return result is NoneNEWLINENEWLINENEWLINEdef login_required(lvl=3):NEWLINE def frame(func):NEWLINE @wraps(func)NEWLINE def wrapped_func(*args, **kwargs):NEWLINE if get_user_auth_lvl() <= lvl:NEWLINE return func(*args, **kwargs)NEWLINE else:NEWLINE flash('Access denied')NEWLINE return redirect('/login')NEWLINE return wrapped_funcNEWLINENEWLINE return frameNEWLINENEWLINENEWLINE@app.route('/')NEWLINE@app.route('/index')NEWLINEdef index():NEWLINE context = get_default_context()NEWLINE return render_template('index.html', **context)NEWLINENEWLINENEWLINE@app.route('/catalog/books')NEWLINEdef books():NEWLINE db_session = get_session()NEWLINE all_books = db_session.query(Book).all()NEWLINENEWLINE context = get_default_context()NEWLINE context['books'] = all_booksNEWLINENEWLINE return render_template('catalog.html', **context)NEWLINENEWLINENEWLINE@app.route('/catalog/books/<int:book_id>')NEWLINEdef book(book_id):NEWLINE db_session = get_session()NEWLINE mybook = db_session.query(Book).filter(Book.id == book_id).scalar()NEWLINENEWLINE if not mybook:NEWLINE abort(404)NEWLINENEWLINE context = get_default_context()NEWLINE context['book'] = mybookNEWLINE return render_template('book.html', **context)NEWLINENEWLINENEWLINE@app.route('/catalog/books/add_book', methods=['GET', 'POST'])NEWLINE@login_required(lvl=1)NEWLINEdef add_book():NEWLINE if request.method == 'GET':NEWLINE context = get_default_context()NEWLINE return render_template('add_book.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE db_session = get_session()NEWLINENEWLINE new_book = Book()NEWLINE new_book.isbn = request.form['isbn']NEWLINE new_book.author = request.form['author']NEWLINE new_book.description = request.form['description']NEWLINE new_book.pages = request.form['pages']NEWLINE new_book.published = request.form['published']NEWLINE new_book.publisher = request.form['publisher']NEWLINE new_book.subtitle = request.form['subtitle']NEWLINE new_book.title = request.form['title']NEWLINE new_book.website = request.form['website']NEWLINENEWLINE db_session.add(new_book)NEWLINE db_session.commit()NEWLINENEWLINE return redirect(url_for('books'))NEWLINENEWLINENEWLINE@app.route('/catalog/books/<int:book_id>/edit', methods=['GET', 'POST'])NEWLINE@login_required(lvl=1)NEWLINEdef edit_book(book_id):NEWLINE if request.method == 'GET':NEWLINE db_session = get_session()NEWLINE mybook = db_session.query(Book).filter(Book.id == book_id).scalar()NEWLINENEWLINE if not mybook:NEWLINE abort(404)NEWLINENEWLINE context = get_default_context()NEWLINE context['book'] = mybookNEWLINE return render_template('edit_book.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE db_session = get_session()NEWLINENEWLINE book = db_session.query(Book).filter(Book.id == book_id).one()NEWLINE book.isbn = request.form['isbn']NEWLINE book.author = request.form['author']NEWLINE book.description = request.form['description']NEWLINE book.pages = request.form['pages']NEWLINE book.published = request.form['published']NEWLINE book.publisher = request.form['publisher']NEWLINE book.subtitle = request.form['subtitle']NEWLINE book.title = request.form['title']NEWLINE book.website = request.form['website']NEWLINENEWLINE db_session.commit()NEWLINENEWLINE return redirect(f'/catalog/books/{book_id}')NEWLINENEWLINENEWLINE@app.route('/catalog/books/remove_book', methods=['GET', 'POST'])NEWLINE@login_required(lvl=1)NEWLINEdef remove_book():NEWLINE if request.method == 'GET':NEWLINE context = get_default_context()NEWLINE return render_template('remove_book.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE book_id = request.form['book_id']NEWLINENEWLINE db_session = get_session()NEWLINE db_session.query(Book) \NEWLINE .filter(Book.id == book_id) \NEWLINE .delete()NEWLINENEWLINE db_session.commit()NEWLINENEWLINE return redirect(url_for('books'))NEWLINENEWLINENEWLINE@app.route('/login', methods=['GET', 'POST'])NEWLINEdef login():NEWLINE if request.method == 'GET':NEWLINE context = get_default_context()NEWLINE context['messages'] = get_flashed_messages()NEWLINENEWLINE return render_template('login.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE db_session = get_session()NEWLINENEWLINE username = request.form['username']NEWLINE pwd = request.form['password']NEWLINENEWLINE user_data = db_session.query(User).filter(User.username == username).scalar()NEWLINENEWLINE if user_data:NEWLINE hashed_password = user_data.passwordNEWLINENEWLINE if check_password_hash(hashed_password, pwd):NEWLINE session['user_id'] = user_data.idNEWLINE session['username'] = user_data.usernameNEWLINE return redirect(url_for('index'))NEWLINENEWLINE flash('Wrong username or password.')NEWLINE return redirect(url_for('login'))NEWLINENEWLINENEWLINE@app.route('/register', methods=['GET', 'POST'])NEWLINEdef register():NEWLINE if request.method == 'GET':NEWLINE context = get_default_context()NEWLINE context['messages'] = get_flashed_messages()NEWLINENEWLINE return render_template('register.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE db_session = get_session()NEWLINENEWLINE username = request.form['username']NEWLINE pwd = request.form['password']NEWLINE pwd_repeat = request.form['repeat_password']NEWLINE email = request.form['email']NEWLINE permission_lvl = 2NEWLINE book_limit = 5NEWLINENEWLINE if db_session.query(User).filter(User.username == username).scalar():NEWLINE flash('User already exists')NEWLINE return redirect(url_for('register'))NEWLINENEWLINE if db_session.query(User).filter(User.email == email).scalar():NEWLINE flash('Email already exists')NEWLINE return redirect(url_for('register'))NEWLINENEWLINE if pwd != pwd_repeat:NEWLINE flash('Passwords don\'t match')NEWLINE return redirect(url_for('register'))NEWLINENEWLINE new_user = User()NEWLINE new_user.username = usernameNEWLINE new_user.password = generate_password_hash(pwd)NEWLINE new_user.email = emailNEWLINE new_user.permission_lvl = permission_lvlNEWLINE new_user.book_limit = book_limitNEWLINENEWLINE db_session.add(new_user)NEWLINE db_session.commit()NEWLINENEWLINE return redirect(url_for('login'))NEWLINENEWLINENEWLINE@app.route('/logout')NEWLINEdef logout():NEWLINE session.clear()NEWLINE return redirect(url_for('login'))NEWLINENEWLINENEWLINE@app.route('/page_not_found')NEWLINEdef page_not_found():NEWLINE return render_template('404.html')NEWLINENEWLINENEWLINE@app.route('/catalog/books/borrow', methods=["POST"])NEWLINE@login_required(lvl=2)NEWLINEdef borrow_book():NEWLINE db_session = get_session()NEWLINE book_id = request.form['borrow_book_id']NEWLINENEWLINE if is_borrowed(book_id):NEWLINE return redirect(url_for('book', book_id=book_id))NEWLINENEWLINE if not is_able_to_borrow(session.get('username')):NEWLINE return redirect(url_for('book', book_id=book_id))NEWLINENEWLINE db_session.query(Book) \NEWLINE .filter(Book.id == book_id) \NEWLINE .update({'user_id': session.get('user_id')})NEWLINE db_session.commit()NEWLINENEWLINE return redirect(redirect_url())NEWLINENEWLINENEWLINE@app.route('/catalog/books/return', methods=["POST"])NEWLINE@login_required(lvl=2)NEWLINEdef return_book():NEWLINE db_session = get_session()NEWLINE book_id = request.form['return_book_id']NEWLINENEWLINE db_session.query(Book) \NEWLINE .filter(Book.id == book_id) \NEWLINE .update({'user_id': None})NEWLINE db_session.commit()NEWLINENEWLINE return redirect(redirect_url())NEWLINENEWLINENEWLINE@app.route('/users')NEWLINE@login_required(lvl=1)NEWLINEdef users():NEWLINE db_session = get_session()NEWLINE users = []NEWLINENEWLINE all_users = db_session.query(User).all()NEWLINE for user in all_users:NEWLINE count_books = db_session.query(Book).filter(Book.user_id == user.id).count()NEWLINE users.append({'username': user.username, 'count_books': count_books})NEWLINENEWLINE context = get_default_context()NEWLINE context['users'] = usersNEWLINENEWLINE return render_template('users.html', **context)NEWLINENEWLINENEWLINE@app.route('/users/user/<username>')NEWLINE@login_required(lvl=2)NEWLINEdef user_profile(username):NEWLINE context = get_default_context()NEWLINENEWLINE if context['permission_lvl'] == 1:NEWLINE passNEWLINE elif username != context['username']:NEWLINE abort(401)NEWLINENEWLINE if not user_exists(username):NEWLINE abort(404)NEWLINENEWLINE context['profile_username'] = usernameNEWLINE context['borrowed_books'] = get_borrowed(username)NEWLINE context['current_user'] = username == session.get('username')NEWLINE return render_template('user_profile.html', **context)NEWLINENEWLINENEWLINE@app.route('/users/user/<username>/edit', methods=["GET", "POST"])NEWLINE@login_required(lvl=2)NEWLINEdef user_profile_edit(username):NEWLINE if request.method == 'GET':NEWLINE context = get_default_context()NEWLINE context['messages'] = get_flashed_messages()NEWLINENEWLINE if context['permission_lvl'] == 1:NEWLINE passNEWLINE elif username != context['username']:NEWLINE abort(401)NEWLINENEWLINE if not user_exists(username):NEWLINE abort(404)NEWLINENEWLINE db_session = get_session()NEWLINE user = db_session.query(User).filter(User.username == username).one()NEWLINENEWLINE context['user'] = userNEWLINENEWLINE return render_template('user_profile_edit.html', **context)NEWLINENEWLINE if request.method == 'POST':NEWLINE db_session = get_session()NEWLINENEWLINE email = request.form['email']NEWLINE book_limit = request.form.get('book_limit')NEWLINE permission_lvl = request.form.get('permission_lvl')NEWLINENEWLINE user = db_session.query(User).filter(User.username == username).one()NEWLINENEWLINE user.email = emailNEWLINE if permission_lvl:NEWLINE user.permission_lvl = permission_lvlNEWLINE if book_limit:NEWLINE user.book_limit = book_limitNEWLINENEWLINE db_session.commit()NEWLINENEWLINE flash('Changes saved')NEWLINENEWLINE return redirect(f'/users/user/{username}/edit')NEWLINENEWLINENEWLINE@app.route('/search_results')NEWLINEdef search_results():NEWLINE arg = request.args.get('query')NEWLINE db_session = get_session()NEWLINE context = get_default_context()NEWLINENEWLINE books = db_session.query(Book).filter(NEWLINE or_(NEWLINE Book.title.contains(arg),NEWLINE Book.author.contains(arg)NEWLINE )NEWLINE ).all()NEWLINENEWLINE context['books'] = booksNEWLINENEWLINE return render_template('search_results.html', **context)NEWLINENEWLINENEWLINE@app.route('/test')NEWLINEdef t_page():NEWLINE result = session.get('username')NEWLINE return str(result)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE app.run()NEWLINE |
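# The Flask app above loads "config.ProductionConfig", which is not shown in this dump;NEWLINE# a minimal hedged sketch of what that module might contain (all names are assumptions):NEWLINEclass ProductionConfig:NEWLINE SECRET_KEY = 'change-me' # flask.session and flash() require a secret keyNEWLINE DEBUG = FalseNEWLINE |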
# -*- coding: utf-8 -*-NEWLINE# Generated by Django 1.10.5 on 2017-04-11 07:35NEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEfrom django.conf import settingsNEWLINEfrom django.db import migrations, modelsNEWLINEimport django.db.models.deletionNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('vdnapp', '0015_auto_20170411_0735'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AlterField(NEWLINE model_name='organization',NEWLINE name='user',NEWLINE field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),NEWLINE ),NEWLINE ]NEWLINE |
from django.contrib import adminNEWLINEfrom .models import OrderItem, orderNEWLINEfrom import_export.admin import ImportExportActionModelAdminNEWLINEfrom django.urls import reverseNEWLINEfrom django.utils.safestring import mark_safeNEWLINENEWLINENEWLINEdef order_pdf(obj):NEWLINE return mark_safe('<a href="{}">PDF</a>'.format(reverse('orders:admin_order_pdf', args=[obj.id])))NEWLINENEWLINEorder_pdf.short_description = 'Order PDF'NEWLINENEWLINENEWLINEclass OrderItemAdmin(admin.TabularInline):NEWLINE '''NEWLINE Admin View for OrderItemNEWLINE '''NEWLINE model = OrderItemNEWLINE raw_id_fields = ['product']NEWLINENEWLINENEWLINEclass OrderAdmin(ImportExportActionModelAdmin):NEWLINE '''NEWLINE Admin View for OrderNEWLINE '''NEWLINE list_display = ('id', 'first_name', 'last_name', 'address', 'email', 'city', 'postal_code', 'paid', order_pdf,)NEWLINE list_filter = ('paid', 'created', 'updated',)NEWLINE search_fields = ['first_name', 'last_name', 'email']NEWLINE inlines = [NEWLINE OrderItemAdmin,NEWLINE ]NEWLINENEWLINEadmin.site.register(order, OrderAdmin)NEWLINE |
"""NEWLINE==============================NEWLINEWFIRST Instruments (pre-alpha)NEWLINE==============================NEWLINENEWLINEWARNING: This model has not yet been validated against other PSFNEWLINE simulations, and uses several approximations (e.g. forNEWLINE mirror polishing errors, which are taken from HST).NEWLINE"""NEWLINENEWLINEimport os.pathNEWLINEimport poppyNEWLINEimport numpy as npNEWLINEfrom . import webbpsf_coreNEWLINEfrom scipy.interpolate import griddataNEWLINEfrom astropy.io import fitsNEWLINEimport loggingNEWLINENEWLINE_log = logging.getLogger('webbpsf')NEWLINEimport pprintNEWLINENEWLINENEWLINEclass WavelengthDependenceInterpolator(object):NEWLINE """WavelengthDependenceInterpolator can be configured withNEWLINE `n_zernikes` worth of Zernike coefficients at up to `n_wavelengths`NEWLINE wavelengths, and will let you `get_aberration_terms` for anyNEWLINE wavelength in range interpolated linearly between measured/knownNEWLINE pointsNEWLINE """NEWLINENEWLINE def __init__(self, n_wavelengths=16, n_zernikes=22):NEWLINE self._n_wavelengths = n_wavelengthsNEWLINE self._n_zernikes = n_zernikesNEWLINE self._aberration_terms = np.zeros((n_wavelengths, n_zernikes), dtype=np.float64)NEWLINE self._wavelengths = []NEWLINENEWLINE def set_aberration_terms(self, wavelength, zernike_array):NEWLINE """Supply a reference `wavelength` and a `zernike_array`NEWLINE (of length `n_zernikes`) where the aberration is knownNEWLINE """NEWLINE n_wavelengths_set = len(self._wavelengths)NEWLINE if wavelength not in self._wavelengths and n_wavelengths_set < self._n_wavelengths:NEWLINE self._wavelengths.append(wavelength)NEWLINE aberration_row_idx = n_wavelengths_set # which is now index of last rowNEWLINE elif wavelength in self._wavelengths:NEWLINE aberration_row_idx = self._wavelengths.index(wavelength)NEWLINE else:NEWLINE # can't add more wavelengths without allocating new _aberration_terms arrayNEWLINE raise ValueError("Already have information at {} wavelengths "NEWLINE "(pass larger n_wavelengths to __init__?)".format(self._n_wavelengths))NEWLINE if len(zernike_array) != self._n_zernikes:NEWLINE raise ValueError("Expected {} aberration terms (pass different "NEWLINE "n_zernikes to __init__?)".format(self._n_zernikes))NEWLINE self._aberration_terms[aberration_row_idx] = zernike_arrayNEWLINENEWLINE def get_aberration_terms(self, wavelength):NEWLINE """Return the Zernike coefficients as interpolated for thisNEWLINE `wavelength`"""NEWLINE # return array of length n_zernikes interpolated for this wavelengthNEWLINE if wavelength in self._wavelengths:NEWLINE # aberration known exactly for this wavelengthNEWLINE aberration_row_idx = self._wavelengths.index(wavelength)NEWLINE return self._aberration_terms[aberration_row_idx]NEWLINE else:NEWLINE # we have to interpolate @ this wavelengthNEWLINE aberration_terms = griddata(self._wavelengths, self._aberration_terms, wavelength, method='linear')NEWLINE if np.any(np.isnan(aberration_terms)):NEWLINE raise RuntimeError("Attempted to get aberrations at wavelength "NEWLINE "outside the range of the reference data")NEWLINE return aberration_termsNEWLINENEWLINENEWLINEclass FieldDependentAberration(poppy.ZernikeWFE):NEWLINE """FieldDependentAberration incorporates aberrations thatNEWLINE are interpolated in wavelength, x, and y pixel positions byNEWLINE computing the Zernike coefficients for a particular wavelengthNEWLINE and position.NEWLINE """NEWLINENEWLINE """By default, `get_aberration_terms` will zero out Z1, Z2, and Z3NEWLINE (piston, tip, and tilt) as they are 
not meaningful for telescopeNEWLINE PSF calculations (the former is irrelevant, the latter two wouldNEWLINE be handled by a distortion solution). ChangeNEWLINE `_omit_piston_tip_tilt` to False to include the Z1-3 terms."""NEWLINE _omit_piston_tip_tilt = TrueNEWLINE _field_position = NoneNEWLINENEWLINE def __init__(self, pixel_width, pixel_height,NEWLINE name="Field-dependent Aberration", radius=1.0, oversample=1, interp_order=3):NEWLINE self.pixel_width, self.pixel_height = pixel_width, pixel_heightNEWLINE self.field_position = pixel_width // 2, pixel_height // 2NEWLINE self._wavelength_interpolators = {}NEWLINE self.pupil_diam = radius * 2.0NEWLINE super(FieldDependentAberration, self).__init__(NEWLINE name=name,NEWLINE verbose=True,NEWLINE radius=radius,NEWLINE oversample=oversample,NEWLINE interp_order=interp_orderNEWLINE )NEWLINENEWLINE def get_opd(self, wave, units='meters'):NEWLINE """Set the Zernike coefficients (for ZernikeWFE.getOPD) basedNEWLINE on the wavelength of the incoming wavefront and the pixelNEWLINE positionNEWLINE """NEWLINE if not isinstance(wave, poppy.Wavefront):NEWLINE wavelength = waveNEWLINE else:NEWLINE wavelength = wave.wavelengthNEWLINE self.coefficients = wavelength * self.get_aberration_terms(wavelength)NEWLINE return super(FieldDependentAberration, self).get_opd(wave, units=units)NEWLINENEWLINE @propertyNEWLINE def field_position(self):NEWLINE return self._field_positionNEWLINENEWLINE @field_position.setterNEWLINE def field_position(self, position):NEWLINE """Set the x and y pixel position on the detector for which toNEWLINE interpolate aberrations"""NEWLINE x_pixel, y_pixel = positionNEWLINE if x_pixel > self.pixel_width or x_pixel < 0:NEWLINE raise ValueError("Requested pixel_x position lies outside "NEWLINE "the detector width ({})".format(x_pixel))NEWLINE if y_pixel > self.pixel_height or y_pixel < 0:NEWLINE raise ValueError("Requested pixel_y position lies outside "NEWLINE "the detector height ({})".format(y_pixel))NEWLINENEWLINE self._field_position = x_pixel, y_pixelNEWLINENEWLINE def add_field_point(self, x_pixel, y_pixel, interpolator):NEWLINE """Supply a wavelength-space interpolator for a pixel positionNEWLINE on the detector"""NEWLINE self._wavelength_interpolators[(x_pixel, y_pixel)] = interpolatorNEWLINENEWLINE def get_aberration_terms(self, wavelength):NEWLINE """Supply the Zernike coefficients for the aberration based onNEWLINE the wavelength and pixel position on the detector"""NEWLINE if self.field_position in self._wavelength_interpolators:NEWLINE # short path: this is a known pointNEWLINE interpolator = self._wavelength_interpolators[self.field_position]NEWLINE coefficients = interpolator.get_aberration_terms(wavelength)NEWLINE else:NEWLINE # get aberrations at all field pointsNEWLINE field_points, aberration_terms = [], []NEWLINE for field_point_coords, point_interpolator in self._wavelength_interpolators.items():NEWLINE field_points.append(field_point_coords)NEWLINE aberration_terms.append(point_interpolator.get_aberration_terms(wavelength))NEWLINE aberration_array = np.asarray(aberration_terms)NEWLINE assert len(aberration_array.shape) == 2, "computed aberration array is not 2D " \NEWLINE "(inconsistent number of Zernike terms " \NEWLINE "at each point?)"NEWLINE coefficients = griddata(NEWLINE np.asarray(field_points),NEWLINE np.asarray(aberration_terms),NEWLINE self.field_position,NEWLINE method='linear'NEWLINE )NEWLINE if np.any(np.isnan(coefficients)):NEWLINE raise RuntimeError("Attempted to get aberrations for an out-of-bounds 
field point")NEWLINE if self._omit_piston_tip_tilt:NEWLINE _log.debug("Omitting piston/tip/tilt")NEWLINE coefficients[:3] = 0.0 # omit piston, tip, and tilt ZernikesNEWLINE return coefficientsNEWLINENEWLINENEWLINEdef _load_wfi_detector_aberrations(filename):NEWLINE from astropy.io import asciiNEWLINE zernike_table = ascii.read(filename)NEWLINE detectors = {}NEWLINENEWLINE def build_detector_from_table(number, zernike_table):NEWLINE """Build a FieldDependentAberration optic for a detector usingNEWLINE Zernikes Z1-Z22 at various wavelengths and field points"""NEWLINE single_detector_info = zernike_table[zernike_table['sca'] == number]NEWLINE field_points = set(single_detector_info['field_point'])NEWLINE interpolators = {}NEWLINE detector = FieldDependentAberration(NEWLINE 4096,NEWLINE 4096,NEWLINE radius=WFIRSTInstrument.PUPIL_RADIUS,NEWLINE name="Field Dependent Aberration (SCA{:02})".format(number)NEWLINE )NEWLINE for field_id in field_points:NEWLINE field_point_rows = single_detector_info[single_detector_info['field_point'] == field_id]NEWLINE local_x, local_y = field_point_rows[0]['local_x'], field_point_rows[0]['local_y']NEWLINE interpolator = build_wavelength_dependence(field_point_rows)NEWLINENEWLINE midpoint_pixel = 4096 / 2NEWLINE # (local_x in mm / 10 um pixel size) -> * 1e2NEWLINE # local_x and _y range from -20.44 to +20.44, so adding to the midpoint pixelNEWLINE # makes sense to place (-20.44, -20.44) at (4, 4)NEWLINE pixx, pixy = (round(midpoint_pixel + local_x * 1e2),NEWLINE round(midpoint_pixel + local_y * 1e2))NEWLINENEWLINE detector.add_field_point(pixx, pixy, interpolator)NEWLINE return detectorNEWLINENEWLINE def build_wavelength_dependence(rows):NEWLINE """Build an interpolator object that interpolates Z1-Z22 inNEWLINE wavelength space"""NEWLINE wavelengths = set(rows['wavelength'])NEWLINE interpolator = WavelengthDependenceInterpolator(n_wavelengths=len(wavelengths),NEWLINE n_zernikes=22)NEWLINE for row in rows:NEWLINE z = np.zeros(22)NEWLINE for idx in range(22):NEWLINE z[idx] = row['Z{}'.format(idx + 1)]NEWLINE interpolator.set_aberration_terms(row['wavelength'] * 1e-6, z)NEWLINENEWLINE return interpolatorNEWLINENEWLINE detector_ids = set(zernike_table['sca'])NEWLINE for detid in detector_ids:NEWLINE detectors["SCA{:02}".format(detid)] = build_detector_from_table(detid, zernike_table)NEWLINENEWLINE return detectorsNEWLINENEWLINENEWLINEclass WFIRSTInstrument(webbpsf_core.SpaceTelescopeInstrument):NEWLINE PUPIL_RADIUS = 2.4 / 2.0NEWLINE """NEWLINE WFIRSTInstrument contains data and functionality common to WFIRSTNEWLINE instruments, such as setting the pupil shapeNEWLINE """NEWLINE telescope = "WFIRST"NEWLINENEWLINE def __init__(self, *args, **kwargs):NEWLINE super(WFIRSTInstrument, self).__init__(*args, **kwargs)NEWLINENEWLINE # slightly different versions of the following two functionsNEWLINE # from the parent superclassNEWLINE # in order to interface with the FieldDependentAberration classNEWLINE @propertyNEWLINE def detector_position(self):NEWLINE """The pixel position in (X, Y) on the detector"""NEWLINE return self._detectors[self._detector].field_positionNEWLINENEWLINE @detector_position.setterNEWLINE def detector_position(self, position):NEWLINE # exact copy of superclass function except we save theNEWLINE # into a different location.NEWLINE try:NEWLINE x, y = map(int, position)NEWLINE except ValueError:NEWLINE raise ValueError("Detector pixel coordinates must be pairs of nonnegative numbers, "NEWLINE "not {}".format(position))NEWLINE if x < 0 or y < 0:NEWLINE 
raise ValueError("Detector pixel coordinates must be nonnegative integers")NEWLINE if x > self._detector_npixels - 1 or y > self._detector_npixels - 1:NEWLINE raise ValueError("The maximum allowed detector pixel "NEWLINE "coordinate value is {}".format(self._detector_npixels - 1))NEWLINENEWLINE self._detectors[self._detector].field_position = (int(position[0]), int(position[1]))NEWLINENEWLINE def _get_aberrations(self):NEWLINE """Get the OpticalElement that applies the field-dependentNEWLINE optical aberrations. (Called in _getOpticalSystem.)"""NEWLINE return self._detectors[self._detector]NEWLINENEWLINE def _get_fits_header(self, result, options):NEWLINE """Populate FITS Header keywords"""NEWLINE super(WFIRSTInstrument, self)._get_fits_header(result, options)NEWLINE result[0].header['DETXPIXL'] = (self.detector_position[0],NEWLINE 'X pixel position (for field dependent aberrations)')NEWLINE result[0].header['DETYPIXL'] = (self.detector_position[1],NEWLINE 'Y pixel position (for field dependent aberrations)')NEWLINE result[0].header['DETECTOR'] = (self.detector, 'Detector selected')NEWLINENEWLINENEWLINEclass WFI(WFIRSTInstrument):NEWLINE """NEWLINE WFI represents the to-be-named wide field imagerNEWLINE for the WFIRST missionNEWLINENEWLINE WARNING: This model has not yet been validated against other PSFNEWLINE simulations, and uses several approximations (e.g. forNEWLINE mirror polishing errors, which are taken from HST).NEWLINE """NEWLINE # "The H158, F184 and W149 filters and the grism are mounted with proximate cold pupil masks"NEWLINE # from the final draft of the SDT report, page 92, table 3-2NEWLINE UNMASKED_PUPIL_WAVELENGTH_MIN, UNMASKED_PUPIL_WAVELENGTH_MAX = 0.760e-6, 1.454e-6NEWLINE MASKED_PUPIL_WAVELENGTH_MIN, MASKED_PUPIL_WAVELENGTH_MAX = 1.380e-6, 2.000e-6NEWLINENEWLINE def __init__(self, set_pupil_mask_on=None):NEWLINE """NEWLINE Initialize WFINEWLINENEWLINE ParametersNEWLINE ----------NEWLINE set_pupil_mask_on : bool or NoneNEWLINE Set to True or False to force using or not using the cold pupil mask,NEWLINE or to None for the automatic behavior.NEWLINE """NEWLINE pixelscale = 110e-3 # arcsec/px, WFIRST-AFTA SDT report final version (p. 91)NEWLINE super(WFI, self).__init__("WFI", pixelscale=pixelscale)NEWLINENEWLINE self._detector_npixels = 4096NEWLINE self._detectors = _load_wfi_detector_aberrations(os.path.join(self._datapath, 'wim_zernikes_cycle7.csv'))NEWLINE assert len(self._detectors.keys()) > 0NEWLINE self.detector = 'SCA01'NEWLINENEWLINE # Paths to the two possible pupils. 
The correct one is selected based on requestedNEWLINE # wavelengths in _validate_config()NEWLINE self._unmasked_pupil_path = os.path.join(self._WebbPSF_basepath,NEWLINE 'WFIRST_SRR_WFC_Pupil_Mask_Shortwave_2048.fits')NEWLINE self._masked_pupil_path = os.path.join(self._WebbPSF_basepath,NEWLINE 'WFIRST_SRR_WFC_Pupil_Mask_Longwave_2048.fits')NEWLINENEWLINE # Flag to en-/disable automatic selection of the appropriate pupil_maskNEWLINE self.auto_pupil = TrueNEWLINENEWLINE self._pupil_mask = "AUTO"NEWLINE self.pupil_mask_list = ['AUTO', 'COLD_PUPIL', 'UNMASKED']NEWLINENEWLINE self.pupil = self._unmasked_pupil_pathNEWLINE if set_pupil_mask_on is not None:NEWLINE if isinstance(set_pupil_mask_on, bool):NEWLINE self.auto_pupil = FalseNEWLINE _log.info("Using custom pupil mask")NEWLINE if set_pupil_mask_on:NEWLINE self.pupil = self._masked_pupil_pathNEWLINE else:NEWLINE raise TypeError("set_pupil_mask_on parameter must be boolean")NEWLINENEWLINE self.opd_list = [NEWLINE os.path.join(self._WebbPSF_basepath, 'upscaled_HST_OPD.fits'),NEWLINE ]NEWLINE self.pupilopd = self.opd_list[-1]NEWLINENEWLINE def _validate_config(self, **kwargs):NEWLINE """Validates that the WFI is configured sensiblyNEWLINENEWLINE This mainly consists of selecting the masked or unmasked pupilNEWLINE appropriately based on the wavelengths requested.NEWLINE """NEWLINE if self.auto_pupil and self.pupil in (self._unmasked_pupil_path, self._masked_pupil_path):NEWLINE # Does the wavelength range fit entirely in an unmasked filter?NEWLINE wavelengths = np.array(kwargs['wavelengths'])NEWLINE wl_min, wl_max = np.min(wavelengths), np.max(wavelengths)NEWLINE # test shorter filters first; if wl range fits entirely in one of them, it's not goingNEWLINE # to be the (masked) wideband filterNEWLINE if wl_max <= self.UNMASKED_PUPIL_WAVELENGTH_MAX:NEWLINE # use unmasked pupil opticNEWLINE self.pupil = self._unmasked_pupil_pathNEWLINE _log.info("Using the unmasked WFI pupil shape based on wavelengths requested")NEWLINE else:NEWLINE if wl_max > self.MASKED_PUPIL_WAVELENGTH_MAX:NEWLINE _log.warn("Requested wavelength is > 2e-6 m, defaulting to masked pupil shape")NEWLINE # use masked pupil opticNEWLINE self.pupil = self._masked_pupil_pathNEWLINE _log.info("Using the masked WFI pupil shape based on wavelengths requested")NEWLINE else:NEWLINE # If the user has set the pupil to a custom value, let them worry about theNEWLINE # correct shape it should haveNEWLINE passNEWLINE super(WFI, self)._validate_config(**kwargs)NEWLINENEWLINE @propertyNEWLINE def pupil_mask(self):NEWLINE return self._pupil_maskNEWLINENEWLINE @pupil_mask.setterNEWLINE def pupil_mask(self, name):NEWLINE """ Set the pupil maskNEWLINENEWLINE ParametersNEWLINE ------------NEWLINE name : stringNEWLINE Name of setting.NEWLINE Settings:NEWLINE - "AUTO":NEWLINE Automatically select pupilNEWLINE - "COLD_PUPIL":NEWLINE Masked pupil overrideNEWLINE - "UNMASKED":NEWLINE Unmasked pupil overrideNEWLINE """NEWLINENEWLINE if name and isinstance(name, str):NEWLINE name = name.upper()NEWLINE if "AUTO" == name:NEWLINE self.auto_pupil = TrueNEWLINE _log.info("Using default pupil mask.")NEWLINE elif "COLD_PUPIL" == name:NEWLINE self.auto_pupil = FalseNEWLINE _log.info("Using custom pupil mask: Masked Pupil.")NEWLINE self.pupil = self._masked_pupil_pathNEWLINE elif "UNMASKED" == name:NEWLINE self.auto_pupil = FalseNEWLINE _log.info("Using custom pupil mask: Unmasked Pupil.")NEWLINE self.pupil = self._unmasked_pupil_pathNEWLINE else:NEWLINE raise ValueError("Instrument {0} doesn't have a pupil mask called 
'{1}'.".format(self.name, name))NEWLINE else:NEWLINE raise ValueError("Pupil mask setting is not valid or empty.")NEWLINE self._pupil_mask = nameNEWLINENEWLINE def _addAdditionalOptics(self, optsys, **kwargs):NEWLINE return optsys, False, NoneNEWLINENEWLINENEWLINEclass CGI(WFIRSTInstrument):NEWLINE """NEWLINE WFIRST Coronagraph InstrumentNEWLINENEWLINE Simulates the PSF of the WFIRST coronagraph.NEWLINENEWLINE Current functionality is limited to the Shaped Pupil Coronagraph (SPC)NEWLINE observing modes, and these modes are only simulated with static, unaberratedNEWLINE wavefronts, without relay optics and without DM control. The designNEWLINE represented here is an approximation to a baseline concept, and will beNEWLINE subject to change based on trade studies and technology development.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE mode : strNEWLINE CGI observing mode. If not specified, the __init__ functionNEWLINE will set this to a default mode 'CHARSPC_F660'NEWLINE pixelscale : floatNEWLINE Detector pixelscale. If not specified, the pixelscale will default toNEWLINE 0.02 arcsec for configurations using the IMAGER camera and 0.025 arcsecNEWLINE for the IFS.NEWLINE fov_arcsec : floatNEWLINE Field of view in arcseconds. If not specified, the field of view willNEWLINE default to 3.20 arcsec for the IMAGER camera and 1.76 arcsec for the IFS.NEWLINENEWLINE """NEWLINENEWLINE camera_list = ['IMAGER', 'IFS']NEWLINE filter_list = ['F660', 'F721', 'F770', 'F890']NEWLINE apodizer_list = ['CHARSPC', 'DISKSPC']NEWLINE fpm_list = ['CHARSPC_F660_BOWTIE', 'CHARSPC_F770_BOWTIE', 'CHARSPC_F890_BOWTIE', 'DISKSPC_F721_ANNULUS']NEWLINE lyotstop_list = ['LS30D88']NEWLINENEWLINE _mode_table = { # MODE CAMERA FILTER APODIZER FPM LYOT STOPNEWLINE 'CHARSPC_F660': ('IFS', 'F660', 'CHARSPC', 'CHARSPC_F660_BOWTIE', 'LS30D88'),NEWLINE 'CHARSPC_F770': ('IFS', 'F770', 'CHARSPC', 'CHARSPC_F770_BOWTIE', 'LS30D88'),NEWLINE 'CHARSPC_F890': ('IFS', 'F890', 'CHARSPC', 'CHARSPC_F890_BOWTIE', 'LS30D88'),NEWLINE 'DISKSPC_F721': ('IMAGER', 'F721', 'DISKSPC', 'DISKSPC_F721_ANNULUS', 'LS30D88')}NEWLINENEWLINE def __init__(self, mode=None, pixelscale=None, fov_arcsec=None, apply_static_opd=False):NEWLINE super(CGI, self).__init__("CGI", pixelscale=pixelscale)NEWLINENEWLINE self._detector_npixels = 1024NEWLINE self._detectors = {camera: 'placeholder' for camera in self.camera_list}NEWLINENEWLINE self.pupil_mask_list = self.lyotstop_list # alias for use in webbpsf_coreNEWLINE self.image_mask_list = self.fpm_list # alias for use in webbpsf_coreNEWLINE self.pupil = os.path.join(self._WebbPSF_basepath, 'AFTA_CGI_C5_Pupil_onax_256px_flip.fits')NEWLINE if apply_static_opd:NEWLINE self.pupilopd = os.path.join(self._WebbPSF_basepath, 'CGI', 'OPD', 'CGI_static_OPD.fits')NEWLINE else:NEWLINE self.pupilopd = NoneNEWLINE self.aberration_optic = NoneNEWLINE self.options = {'force_coron': True}NEWLINE # Allow the user to pre-emptively override the default instrument FoV and pixel scaleNEWLINE if fov_arcsec is not None:NEWLINE self.fov_arcsec = fov_arcsecNEWLINE self._override_fov = TrueNEWLINE else:NEWLINE self._override_fov = FalseNEWLINE if pixelscale is not None:NEWLINE self._pixelscale = pixelscaleNEWLINE self._override_pixelscale = TrueNEWLINE else:NEWLINE self._override_pixelscale = FalseNEWLINENEWLINE if mode is None:NEWLINE self.print_mode_table()NEWLINE _log.info("Since the mode was not specified at instantiation, defaulting to CHARSPC_F660")NEWLINE self.mode = 'CHARSPC_F660'NEWLINE else:NEWLINE self.mode = modeNEWLINENEWLINE @propertyNEWLINE def camera(self):NEWLINE """Currently selected camera name"""NEWLINE return self._cameraNEWLINENEWLINE @camera.setterNEWLINE def camera(self, value):NEWLINE value = value.upper() # force to uppercaseNEWLINE if value not in self.camera_list:NEWLINE raise ValueError("Instrument {0} doesn't have a camera called {1}.".format(self.name, value))NEWLINE self._camera = valueNEWLINE if value == 'IMAGER':NEWLINE if not hasattr(self, 'fov_arcsec') or not self._override_fov:NEWLINE self.fov_arcsec = 3.2NEWLINE if not hasattr(self, 'pixelscale') or not self._override_pixelscale:NEWLINE self.pixelscale = 0.020 # Nyquist at 465 nmNEWLINE else: # default to 'IFS'NEWLINE if not hasattr(self, 'fov_arcsec') or not self._override_fov:NEWLINE self.fov_arcsec = 2 * 0.82 # 2015 SDT report, Section 3.4.1.1.1:NEWLINE # IFS has 76 lenslets across the (2 x 0.82) arcsec FoV.NEWLINE if not hasattr(self, 'pixelscale') or not self._override_pixelscale:NEWLINE self.pixelscale = 0.025 # Nyquist at 600 nmNEWLINENEWLINE # for CGI, there is one detector per camera and it should be set automatically.NEWLINE @propertyNEWLINE def detector(self):NEWLINE return self.cameraNEWLINENEWLINE @detector.setterNEWLINE def detector(self, value):NEWLINE raise RuntimeError("Can't set detector directly for CGI; set camera instead.")NEWLINENEWLINE @propertyNEWLINE def filter(self):NEWLINE """Currently selected filter name"""NEWLINE return self._filterNEWLINENEWLINE @filter.setterNEWLINE def filter(self, value):NEWLINE value = value.upper() # force to uppercaseNEWLINE if value not in self.filter_list:NEWLINE raise ValueError("Instrument {0} doesn't have a filter called {1}.".format(self.name, value))NEWLINE self._filter = valueNEWLINENEWLINE @propertyNEWLINE def apodizer(self):NEWLINE """Currently selected apodizer name"""NEWLINE return self._apodizerNEWLINENEWLINE @apodizer.setterNEWLINE def apodizer(self, value):NEWLINE value = value.upper() # force to uppercaseNEWLINE if value not in self.apodizer_list:NEWLINE raise ValueError("Instrument {0} doesn't have an apodizer called {1}.".format(self.name, value))NEWLINE self._apodizer = valueNEWLINE if value == 'DISKSPC':NEWLINE self._apodizer_fname = \NEWLINE os.path.join(self._datapath, "optics/DISKSPC_SP_256pix.fits.gz")NEWLINE else: # for now, default to CHARSPCNEWLINE self._apodizer_fname = \NEWLINE os.path.join(self._datapath, "optics/CHARSPC_SP_256pix.fits.gz")NEWLINENEWLINE @propertyNEWLINE def fpm(self):NEWLINE """Currently selected FPM name"""NEWLINE return self._fpmNEWLINENEWLINE @fpm.setterNEWLINE def fpm(self, value):NEWLINE value = value.upper() # force to uppercaseNEWLINE if value not in self.fpm_list:NEWLINE raise ValueError("Instrument {0} doesn't have an FPM called {1}.".format(self.name, value))NEWLINE self._fpm = valueNEWLINE if value.startswith('DISKSPC'):NEWLINE self._fpmres = 3NEWLINE self._owa = 20.NEWLINE self._Mfpm = int(np.ceil(self._fpmres * self._owa))NEWLINE self._fpm_fname = \NEWLINE os.path.join(self._datapath,NEWLINE "optics/DISKSPC_FPM_65WA200_360deg_-_FP1res{0:d}_evensamp_D{1:03d}_{2:s}.fits.gz".format(NEWLINE self._fpmres, 2 * self._Mfpm, self.filter))NEWLINE else:NEWLINE self._fpmres = 4NEWLINE self._owa = 9.NEWLINE self._Mfpm = int(np.ceil(self._fpmres * self._owa))NEWLINE self._fpm_fname = \NEWLINE os.path.join(self._datapath,NEWLINE "optics/CHARSPC_FPM_25WA90_2x65deg_-_FP1res{0:d}_evensamp_D{1:03d}_{2:s}.fits.gz".format(NEWLINE self._fpmres, 2 * self._Mfpm, self.filter))NEWLINENEWLINE @propertyNEWLINE def lyotstop(self):NEWLINE """Currently 
selected Lyot stop name"""NEWLINE return self._lyotstopNEWLINENEWLINE @lyotstop.setterNEWLINE def lyotstop(self, value):NEWLINE # preserve case for this one since we're used to that with the lyot mask namesNEWLINE if value not in self.lyotstop_list:NEWLINE raise ValueError("Instrument {0} doesn't have a Lyot mask called {1}.".format(self.name, value))NEWLINE self._lyotstop = valueNEWLINE self._lyotstop_fname = os.path.join(self._datapath, "optics/SPC_LS_30D88_256pix.fits.gz")NEWLINENEWLINE @propertyNEWLINE def mode_list(self):NEWLINE """Available Observation Modes"""NEWLINE keys = self._mode_table.keys()NEWLINE keys = sorted(keys)NEWLINE return keysNEWLINENEWLINE # mode works differently since it's a meta-property that affects the other ones:NEWLINE @propertyNEWLINE def mode(self):NEWLINE """Currently selected mode name"""NEWLINE for modename, settings in self._mode_table.items():NEWLINE if (self.camera == settings[0].upper() and self.filter == settings[1].upper() andNEWLINE self.apodizer == settings[2].upper() and self.fpm == settings[3].upper() andNEWLINE self.lyotstop == settings[4]):NEWLINE return modenameNEWLINE return 'Custom'NEWLINENEWLINE @mode.setterNEWLINE def mode(self, value):NEWLINE if value not in self.mode_list:NEWLINE raise ValueError("Instrument {0} doesn't have a mode called {1}.".format(self.name, value))NEWLINE settings = self._mode_table[value]NEWLINE self.camera = settings[0]NEWLINE self.filter = settings[1]NEWLINE self.apodizer = settings[2]NEWLINE self.fpm = settings[3]NEWLINE self.lyotstop = settings[4]NEWLINE _log.info('Set the following optical configuration:')NEWLINE _log.info('camera = {0}, filter = {1}, apodizer = {2}, fpm = {3}, lyotstop = {4}'.format(\NEWLINE self.camera, self.filter, self.apodizer, self.fpm, self.lyotstop))NEWLINENEWLINE def print_mode_table(self):NEWLINE """Print the table of observing mode options and their associated optical configuration"""NEWLINE _log.info("Printing the table of WFIRST CGI observing modes supported by WebbPSF.")NEWLINE _log.info("Each is defined by a combo of camera, filter, apodizer, "NEWLINE "focal plane mask (FPM), and Lyot stop settings:")NEWLINE _log.info(pprint.pformat(self._mode_table))NEWLINENEWLINE @propertyNEWLINE def detector_position(self):NEWLINE """The pixel position in (X, Y) on the detector"""NEWLINE return 512, 512NEWLINENEWLINE @detector_position.setterNEWLINE def detector_position(self, position):NEWLINE raise RuntimeError("Detector position not adjustable for CGI")NEWLINENEWLINE def _validate_config(self, **kwargs):NEWLINE super(CGI, self)._validate_config(**kwargs)NEWLINENEWLINE def _addAdditionalOptics(self, optsys, oversample=4):NEWLINE """Add coronagraphic or spectrographic optics for WFIRST CGI."""NEWLINENEWLINE trySAM = FalseNEWLINENEWLINE if ('pupil_shift_x' in self.options and self.options['pupil_shift_x'] != 0) or \NEWLINE ('pupil_shift_y' in self.options and self.options['pupil_shift_y'] != 0):NEWLINE shift = (self.options['pupil_shift_x'], self.options['pupil_shift_y'])NEWLINE else:NEWLINE shift = NoneNEWLINENEWLINE # Add the shaped pupil apodizerNEWLINE optsys.add_pupil(transmission=self._apodizer_fname, name=self.apodizer, shift=None)NEWLINENEWLINE # Add the FPMNEWLINE optsys.add_image(transmission=self._fpm_fname, name=self.fpm)NEWLINENEWLINE # Add Lyot stopNEWLINE self.pupil_mask = self.lyotstopNEWLINE optsys.add_pupil(transmission=self._lyotstop_fname, name=self.lyotstop, shift=shift)NEWLINENEWLINE # Cast as MatrixFTCoronagraph; this configures the detectorNEWLINE occ_box_size = 
1.NEWLINE mft_optsys = poppy.MatrixFTCoronagraph(optsys, oversample=oversample, occulter_box=occ_box_size)NEWLINENEWLINE return mft_optsys, trySAM, occ_box_sizeNEWLINENEWLINE def _get_aberrations(self):NEWLINE """Get the OpticalElement that applies the field-dependentNEWLINE optical aberrations. (Called in _getOpticalSystem.)"""NEWLINE return NoneNEWLINENEWLINE def _get_fits_header(self, result, options):NEWLINE """Populate FITS Header keywords"""NEWLINE super(WFIRSTInstrument, self)._get_fits_header(result, options)NEWLINE pupil_hdr = fits.getheader(self.pupil)NEWLINE apodizer_hdr = fits.getheader(self._apodizer_fname)NEWLINE fpm_hdr = fits.getheader(self._fpm_fname)NEWLINE lyotstop_hdr = fits.getheader(self._lyotstop_fname)NEWLINENEWLINE result[0].header.set('MODE', self.mode, comment='Observing mode')NEWLINE result[0].header.set('CAMERA', self.camera, comment='Imager or IFS')NEWLINE result[0].header.set('APODIZER', self.apodizer, comment='Apodizer')NEWLINE result[0].header.set('APODTRAN', os.path.basename(self._apodizer_fname),NEWLINE comment='Apodizer transmission')NEWLINE result[0].header.set('PUPLSCAL', apodizer_hdr['PUPLSCAL'],NEWLINE comment='Apodizer pixel scale in m/pixel')NEWLINE result[0].header.set('PUPLDIAM', apodizer_hdr['PUPLDIAM'],NEWLINE comment='Full apodizer array size, incl padding.')NEWLINE result[0].header.set('FPM', self.fpm, comment='Focal plane mask')NEWLINE result[0].header.set('FPMTRAN', os.path.basename(self._fpm_fname),NEWLINE comment='FPM transmission')NEWLINE result[0].header.set('FPMSCAL', fpm_hdr['PIXSCALE'], comment='FPM spatial sampling, arcsec/pix')NEWLINE result[0].header.set('LYOTSTOP', self.lyotstop, comment='Lyot stop')NEWLINE result[0].header.set('LSTRAN', os.path.basename(self._lyotstop_fname),NEWLINE comment='Lyot stop transmission')NEWLINE result[0].header.set('PUPLSCAL', lyotstop_hdr['PUPLSCAL'],NEWLINE comment='Lyot stop pixel scale in m/pixel')NEWLINE result[0].header.set('PUPLDIAM', lyotstop_hdr['PUPLDIAM'],NEWLINE comment='Lyot stop array size, incl padding.')NEWLINE |
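# Hedged usage sketch for the WFI model above: the detector and detector_positionNEWLINE# setters are defined in this file, while calc_psf is inherited from the webbpsf/poppyNEWLINE# instrument base class (its exact signature here is an assumption).NEWLINEwfi = WFI()NEWLINEwfi.detector = 'SCA01' # one of the SCA## keys loaded from the Zernike tableNEWLINEwfi.detector_position = (2048, 2048) # field point used by the aberration interpolatorNEWLINEpsf = wfi.calc_psf(monochromatic=1.2e-6, fov_arcsec=2.0) # 1.2 um lies in the unmasked-pupil rangeNEWLINE |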
# Copyright 2021 Google LLCNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE"""Module for running fuzzers."""NEWLINEimport enumNEWLINEimport loggingNEWLINEimport osNEWLINEimport shutilNEWLINEimport sysNEWLINEimport timeNEWLINENEWLINEimport clusterfuzz_deploymentNEWLINEimport fuzz_targetNEWLINEimport stack_parserNEWLINENEWLINE# pylint: disable=wrong-import-position,import-errorNEWLINEsys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))NEWLINENEWLINEimport utilsNEWLINENEWLINENEWLINEclass RunFuzzersResult(enum.Enum):NEWLINE """Enum result from running fuzzers."""NEWLINE ERROR = 0NEWLINE BUG_FOUND = 1NEWLINE NO_BUG_FOUND = 2NEWLINENEWLINENEWLINEclass BaseFuzzTargetRunner:NEWLINE """Base class for fuzzer runners."""NEWLINENEWLINE def __init__(self, config):NEWLINE self.config = configNEWLINE self.clusterfuzz_deployment = (NEWLINE clusterfuzz_deployment.get_clusterfuzz_deployment(self.config))NEWLINE # Set by the initialize method.NEWLINE self.out_dir = NoneNEWLINE self.fuzz_target_paths = NoneNEWLINE self.artifacts_dir = NoneNEWLINENEWLINE def initialize(self):NEWLINE """Initialization method. Must be called before calling run_fuzz_targets.NEWLINE Returns True on success."""NEWLINE # Use a separate initialization function so we can return False on failureNEWLINE # instead of raising an exception, as we would have to if this were doneNEWLINE # in the __init__ method.NEWLINENEWLINE logging.info('Using %s sanitizer.', self.config.sanitizer)NEWLINENEWLINE # TODO(metzman) Add a check to ensure we aren't over the time limit.NEWLINE if not self.config.fuzz_seconds or self.config.fuzz_seconds < 1:NEWLINE logging.error(NEWLINE 'Fuzz_seconds argument must be at least 1, but was: %s.',NEWLINE self.config.fuzz_seconds)NEWLINE return FalseNEWLINENEWLINE self.out_dir = os.path.join(self.config.workspace, 'out')NEWLINE if not os.path.exists(self.out_dir):NEWLINE logging.error('Out directory: %s does not exist.', self.out_dir)NEWLINE return FalseNEWLINENEWLINE self.artifacts_dir = os.path.join(self.out_dir, 'artifacts')NEWLINE if not os.path.exists(self.artifacts_dir):NEWLINE os.mkdir(self.artifacts_dir)NEWLINE elif (not os.path.isdir(self.artifacts_dir) orNEWLINE os.listdir(self.artifacts_dir)):NEWLINE logging.error('Artifacts path: %s exists and is not an empty directory.',NEWLINE self.artifacts_dir)NEWLINE return FalseNEWLINENEWLINE self.fuzz_target_paths = utils.get_fuzz_targets(self.out_dir)NEWLINE logging.info('Fuzz targets: %s', self.fuzz_target_paths)NEWLINE if not self.fuzz_target_paths:NEWLINE logging.error('No fuzz targets were found in out directory: %s.',NEWLINE self.out_dir)NEWLINE return FalseNEWLINENEWLINE return TrueNEWLINENEWLINE def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-useNEWLINE """Fuzzes with |fuzz_target_obj| and returns the result."""NEWLINE # TODO(metzman): Make children implement this so that the batch runner canNEWLINE # do things differently.NEWLINE result = 
fuzz_target_obj.fuzz()NEWLINE fuzz_target_obj.free_disk_if_needed()NEWLINE return resultNEWLINENEWLINE @propertyNEWLINE def quit_on_bug_found(self):NEWLINE """Property that is checked to determine if fuzzing should quit after firstNEWLINE bug is found."""NEWLINE raise NotImplementedError('Child class must implement method')NEWLINENEWLINE def get_fuzz_target_artifact(self, target, artifact_name):NEWLINE """Returns the path of a fuzzing artifact named |artifact_name| forNEWLINE |fuzz_target|."""NEWLINE artifact_name = '{target_name}-{sanitizer}-{artifact_name}'.format(NEWLINE target_name=target.target_name,NEWLINE sanitizer=self.config.sanitizer,NEWLINE artifact_name=artifact_name)NEWLINE return os.path.join(self.artifacts_dir, artifact_name)NEWLINENEWLINE def create_fuzz_target_obj(self, target_path, run_seconds):NEWLINE """Returns a fuzz target object."""NEWLINE return fuzz_target.FuzzTarget(target_path, run_seconds, self.out_dir,NEWLINE self.clusterfuzz_deployment, self.config)NEWLINENEWLINE def run_fuzz_targets(self):NEWLINE """Runs fuzz targets. Returns True if a bug was found."""NEWLINE fuzzers_left_to_run = len(self.fuzz_target_paths)NEWLINENEWLINE # Make a copy since we will mutate it.NEWLINE fuzz_seconds = self.config.fuzz_secondsNEWLINENEWLINE min_seconds_per_fuzzer = fuzz_seconds // fuzzers_left_to_runNEWLINE bug_found = FalseNEWLINE for target_path in self.fuzz_target_paths:NEWLINE # By doing this, we can ensure that every fuzz target runs for at leastNEWLINE # min_seconds_per_fuzzer, but that other fuzzers will have longer to runNEWLINE # if one ends early.NEWLINE run_seconds = max(fuzz_seconds // fuzzers_left_to_run,NEWLINE min_seconds_per_fuzzer)NEWLINENEWLINE target = self.create_fuzz_target_obj(target_path, run_seconds)NEWLINE start_time = time.time()NEWLINE result = self.run_fuzz_target(target)NEWLINENEWLINE # It's OK if this goes negative since we take max when determiningNEWLINE # run_seconds.NEWLINE fuzz_seconds -= time.time() - start_timeNEWLINENEWLINE fuzzers_left_to_run -= 1NEWLINE if not result.testcase or not result.stacktrace:NEWLINE logging.info('Fuzzer %s finished running without crashes.',NEWLINE target.target_name)NEWLINE continueNEWLINENEWLINE # We found a bug in the fuzz target.NEWLINE utils.binary_print(b'Fuzzer: %s. Detected bug:\n%s' %NEWLINE (target.target_name.encode(), result.stacktrace))NEWLINENEWLINE # TODO(metzman): Do this with filestore.NEWLINE testcase_artifact_path = self.get_fuzz_target_artifact(NEWLINE target, os.path.basename(result.testcase))NEWLINE shutil.move(result.testcase, testcase_artifact_path)NEWLINE bug_summary_artifact_path = self.get_fuzz_target_artifact(NEWLINE target, 'bug-summary.txt')NEWLINE stack_parser.parse_fuzzer_output(result.stacktrace,NEWLINE bug_summary_artifact_path)NEWLINENEWLINE bug_found = TrueNEWLINE if self.quit_on_bug_found:NEWLINE logging.info('Bug found. 
Stopping fuzzing.')NEWLINE return bug_foundNEWLINENEWLINE return bug_foundNEWLINENEWLINENEWLINEclass CiFuzzTargetRunner(BaseFuzzTargetRunner):NEWLINE """Runner for fuzz targets used in CI (patch-fuzzing) context."""NEWLINENEWLINE @propertyNEWLINE def quit_on_bug_found(self):NEWLINE return TrueNEWLINENEWLINENEWLINEclass BatchFuzzTargetRunner(BaseFuzzTargetRunner):NEWLINE """Runner for fuzz targets used in batch fuzzing context."""NEWLINENEWLINE @propertyNEWLINE def quit_on_bug_found(self):NEWLINE return FalseNEWLINENEWLINENEWLINEdef get_fuzz_target_runner(config):NEWLINE """Returns a fuzz target runner object based on the run_fuzzers_mode ofNEWLINE |config|."""NEWLINE logging.info('RUN_FUZZERS_MODE is: %s', config.run_fuzzers_mode)NEWLINE if config.run_fuzzers_mode == 'batch':NEWLINE return BatchFuzzTargetRunner(config)NEWLINE return CiFuzzTargetRunner(config)NEWLINENEWLINENEWLINEdef run_fuzzers(config): # pylint: disable=too-many-localsNEWLINE """Runs fuzzers for a specific OSS-Fuzz project.NEWLINENEWLINE Args:NEWLINE config: A RunFuzzTargetsConfig.NEWLINENEWLINE Returns:NEWLINE A RunFuzzersResult enum value indicating what happened during fuzzing.NEWLINE """NEWLINE fuzz_target_runner = get_fuzz_target_runner(config)NEWLINE if not fuzz_target_runner.initialize():NEWLINE # We didn't fuzz at all because of internal (CIFuzz) errors. And we didn'tNEWLINE # find any bugs.NEWLINE return RunFuzzersResult.ERRORNEWLINENEWLINE if not fuzz_target_runner.run_fuzz_targets():NEWLINE # We fuzzed successfully, but didn't find any bugs (in the fuzz target).NEWLINE return RunFuzzersResult.NO_BUG_FOUNDNEWLINENEWLINE # We fuzzed successfully and found bug(s) in the fuzz targets.NEWLINE return RunFuzzersResult.BUG_FOUNDNEWLINE |
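The run_fuzz_targets budgeting above redistributes leftover time as targets finish early. A minimal standalone sketch of that arithmetic (plain ints, illustrative numbers, no actual fuzzing):

# Standalone sketch of the time-budget logic in run_fuzz_targets above.
# Assumes three targets and a 600-second budget; numbers are illustrative.
fuzz_seconds = 600
fuzzers_left = 3
min_seconds_per_fuzzer = fuzz_seconds // fuzzers_left  # 200

# Target 1 gets max(600 // 3, 200) = 200s but finishes after only 50s.
fuzz_seconds -= 50
fuzzers_left -= 1

# Target 2 now gets max(550 // 2, 200) = 275s: the unused 150s were
# redistributed, and the max() keeps the floor at 200s per target.
run_seconds = max(fuzz_seconds // fuzzers_left, min_seconds_per_fuzzer)
assert run_seconds == 275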
import boto3NEWLINEimport sysNEWLINEfrom st2actions.runners.pythonrunner import ActionNEWLINENEWLINEclass GetStackBuildStatus(Action):NEWLINE    def run(self, stack_name_or_id):NEWLINE        region = self.config['region']NEWLINENEWLINE        # Terminal CloudFormation states; anything else means the build is still in progress.NEWLINE        stack_states = ['CREATE_COMPLETE', 'CREATE_FAILED', 'ROLLBACK_COMPLETE']NEWLINENEWLINE        client = boto3.client('cloudformation', region_name=region)NEWLINENEWLINE        try:NEWLINE            stack_status = client.describe_stacks(StackName=stack_name_or_id)['Stacks'][0]['StackStatus']NEWLINENEWLINE        except Exception as err:NEWLINE            sys.stderr.write('ERROR: %s\n' % str(err))NEWLINE            raiseNEWLINENEWLINE        # Exit non-zero so StackStorm marks the action as failed while the stack is still transitioning.NEWLINE        if stack_status not in stack_states:NEWLINE            sys.stderr.write('Current state: %s\n' % stack_status)NEWLINE            sys.exit(2)NEWLINENEWLINE        return TrueNEWLINE |
import plataNEWLINENEWLINENEWLINEdef plata_context(request):NEWLINE """NEWLINE Adds a few variables from Plata to the context if they are available:NEWLINENEWLINE * ``plata.shop``: The current :class:`plata.shop.views.Shop` instanceNEWLINE * ``plata.order``: The current orderNEWLINE * ``plata.contact``: The current contact instanceNEWLINE * ``plata.price_includes_tax``: Whether prices include tax or notNEWLINE """NEWLINENEWLINE shop = plata.shop_instance()NEWLINE return {'plata': {NEWLINE 'shop': shop,NEWLINE 'order': shop.order_from_request(request),NEWLINE 'contact': (shop.contact_from_user(request.user)NEWLINE if hasattr(request, 'user') else None),NEWLINE 'price_includes_tax': shop.price_includes_tax(request),NEWLINE }} if shop else {}NEWLINE |
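For plata_context to run, it has to be registered as a template context processor. A minimal sketch of the relevant TEMPLATES entry in settings.py; the dotted path assumes the module above lives at plata/context_processors.py, and the other options are illustrative defaults:

# settings.py (sketch): register the context processor above.
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            # Assumed module path for the function defined above.
            'plata.context_processors.plata_context',
        ],
    },
}]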
# -*- coding: utf-8 -*-NEWLINE"""The app module, containing the app factory function."""NEWLINEfrom flask import Flask, render_templateNEWLINENEWLINEfrom hamcwebc import commands, public, userNEWLINEfrom hamcwebc.assets import assetsNEWLINEfrom hamcwebc.extensions import bcrypt, cache, csrf_protect, db, debug_toolbar, login_manager, migrate, celeryNEWLINEfrom hamcwebc.settings import ProdConfigNEWLINENEWLINENEWLINEdef create_app(config_object=ProdConfig):NEWLINE """An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.NEWLINE :param config_object: The configuration object to use.NEWLINE """NEWLINE app = Flask(__name__)NEWLINE app.config.from_object(config_object)NEWLINE register_extensions(app)NEWLINE register_blueprints(app)NEWLINE register_errorhandlers(app)NEWLINE register_shellcontext(app)NEWLINE register_commands(app)NEWLINE return appNEWLINENEWLINENEWLINEdef register_extensions(app):NEWLINE """Register Flask extensions."""NEWLINE assets.init_app(app)NEWLINE bcrypt.init_app(app)NEWLINE cache.init_app(app)NEWLINE db.init_app(app)NEWLINE csrf_protect.init_app(app)NEWLINE login_manager.init_app(app)NEWLINE debug_toolbar.init_app(app)NEWLINE migrate.init_app(app, db)NEWLINE celery.conf.update(app.config)NEWLINE return NoneNEWLINENEWLINENEWLINEdef register_blueprints(app):NEWLINE """Register Flask blueprints."""NEWLINE app.register_blueprint(public.views.blueprint)NEWLINE app.register_blueprint(user.views.blueprint)NEWLINE return NoneNEWLINENEWLINENEWLINEdef register_errorhandlers(app):NEWLINE """Register error handlers."""NEWLINE def render_error(error):NEWLINE """Render error template."""NEWLINE # If a HTTPException, pull the `code` attribute; default to 500NEWLINE error_code = getattr(error, 'code', 500)NEWLINE return render_template('{0}.html'.format(error_code)), error_codeNEWLINE for errcode in [401, 404, 500]:NEWLINE app.errorhandler(errcode)(render_error)NEWLINE return NoneNEWLINENEWLINENEWLINEdef register_shellcontext(app):NEWLINE """Register shell context objects."""NEWLINE def shell_context():NEWLINE """Shell context objects."""NEWLINE return {NEWLINE 'db': db,NEWLINE 'celery': celery,NEWLINE 'User': user.models.User}NEWLINENEWLINE app.shell_context_processor(shell_context)NEWLINENEWLINENEWLINEdef register_commands(app):NEWLINE """Register Click commands."""NEWLINE app.cli.add_command(commands.test)NEWLINE app.cli.add_command(commands.lint)NEWLINE app.cli.add_command(commands.clean)NEWLINE app.cli.add_command(commands.urls)NEWLINE |
'''NEWLINEFile name: common_utils.pyNEWLINEProgrammed by: Mike BernardNEWLINEDate: 2019-11-08NEWLINENEWLINECommon helper functions used in multiple scripts.NEWLINE'''NEWLINENEWLINEfrom nav.utils.constants import PASSNEWLINENEWLINENEWLINEdef weighted_avg(values, weights):NEWLINE    '''NEWLINE    Takes a list of values and a list of weights associatedNEWLINE    with those values (index-to-index) and returns a weightedNEWLINE    average of those values as a float. Each weight is treatedNEWLINE    as a 1-sigma uncertainty, so values are combined byNEWLINE    inverse-variance weighting (each value contributes 1/w**2).NEWLINENEWLINE    :param values: `list` of values to be averagedNEWLINE    :param weights: `list` of uncertainties for each value (index-to-index)NEWLINENEWLINE    :return: `float` The weighted average of the valuesNEWLINE    '''NEWLINE    denom = sum([1 / w ** 2 for w in weights])NEWLINE    num = sum([1 / w ** 2 * v for v, w in zip(values, weights)])NEWLINENEWLINE    return num / denomNEWLINENEWLINENEWLINEdef unit_test(module_name, tests):NEWLINE    '''NEWLINE    Run a set of test functions and print out the results.NEWLINE    See test directory for examples of how to structure these testsNEWLINE    and how to set up calling this function.NEWLINE    NEWLINE    :param module_name: `str` the name of the module being testedNEWLINE    :param tests: `list` of functions to test as objectsNEWLINE    '''NEWLINE    passed = 0NEWLINE    failed = 0NEWLINE    fail_messages = []NEWLINENEWLINE    for test in tests:NEWLINE        status, description = test()NEWLINE        if status == PASS:NEWLINE            passed += 1NEWLINE        else:NEWLINE            failed += 1NEWLINE            fail_messages.append(description)NEWLINENEWLINE    print(module_name, 'unit test results: ', end='')NEWLINE    print('{} out of {} tests passed.'.format(passed, len(tests)))NEWLINE    if len(fail_messages) > 0:NEWLINE        print('Failed tests:')NEWLINE        for msg in fail_messages:NEWLINE            print('\t' + msg)NEWLINE |
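A quick worked example of weighted_avg with two measurements, treating the weights as 1-sigma uncertainties as the implementation does:

# values 1.0 (sigma 1) and 3.0 (sigma 2):
# denom = 1/1**2 + 1/2**2 = 1.25
# num   = 1.0/1**2 + 3.0/2**2 = 1.75
# the less certain measurement pulls the result only slightly: 1.75/1.25 = 1.4
assert weighted_avg([1.0, 3.0], [1.0, 2.0]) == 1.4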
# coding: utf-8NEWLINENEWLINE"""NEWLINE FlashBlade REST APINEWLINENEWLINE A lightweight client for FlashBlade REST API 2.3, developed by Pure Storage, Inc. (http://www.purestorage.com/).NEWLINENEWLINE OpenAPI spec version: 2.3NEWLINE NEWLINE Generated by: https://github.com/swagger-api/swagger-codegen.gitNEWLINE"""NEWLINENEWLINENEWLINEimport pprintNEWLINEimport reNEWLINENEWLINEimport sixNEWLINEimport typingNEWLINENEWLINEfrom ....properties import PropertyNEWLINEif typing.TYPE_CHECKING:NEWLINE from pypureclient.flashblade.FB_2_3 import modelsNEWLINENEWLINEclass CertificateUse(object):NEWLINE """NEWLINE Attributes:NEWLINE swagger_types (dict): The key is attribute nameNEWLINE and the value is attribute type.NEWLINE attribute_map (dict): The key is attribute nameNEWLINE and the value is json key in definition.NEWLINE """NEWLINE swagger_types = {NEWLINE 'name': 'str',NEWLINE 'id': 'str',NEWLINE 'group': 'FixedReference',NEWLINE 'use': 'FixedReferenceWithRemote'NEWLINE }NEWLINENEWLINE attribute_map = {NEWLINE 'name': 'name',NEWLINE 'id': 'id',NEWLINE 'group': 'group',NEWLINE 'use': 'use'NEWLINE }NEWLINENEWLINE required_args = {NEWLINE }NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE name=None, # type: strNEWLINE id=None, # type: strNEWLINE group=None, # type: models.FixedReferenceNEWLINE use=None, # type: models.FixedReferenceWithRemoteNEWLINE ):NEWLINE """NEWLINE Keyword args:NEWLINE name (str): Name of the object (e.g., a file system or snapshot).NEWLINE id (str): A non-modifiable, globally unique ID chosen by the system.NEWLINE group (FixedReference): A reference to a certificate group that is being used, if any, where this certificate is a member of the certificate-group. This field is `null` if the referenced use object is not using a group, but is rather using this certificate directly.NEWLINE use (FixedReferenceWithRemote): A reference to an object using this certificate.NEWLINE """NEWLINE if name is not None:NEWLINE self.name = nameNEWLINE if id is not None:NEWLINE self.id = idNEWLINE if group is not None:NEWLINE self.group = groupNEWLINE if use is not None:NEWLINE self.use = useNEWLINENEWLINE def __setattr__(self, key, value):NEWLINE if key not in self.attribute_map:NEWLINE raise KeyError("Invalid key `{}` for `CertificateUse`".format(key))NEWLINE self.__dict__[key] = valueNEWLINENEWLINE def __getattribute__(self, item):NEWLINE value = object.__getattribute__(self, item)NEWLINE if isinstance(value, Property):NEWLINE return NoneNEWLINE else:NEWLINE return valueNEWLINENEWLINE def to_dict(self):NEWLINE """Returns the model properties as a dict"""NEWLINE result = {}NEWLINENEWLINE for attr, _ in six.iteritems(self.swagger_types):NEWLINE if hasattr(self, attr):NEWLINE value = getattr(self, attr)NEWLINE if isinstance(value, list):NEWLINE result[attr] = list(map(NEWLINE lambda x: x.to_dict() if hasattr(x, "to_dict") else x,NEWLINE valueNEWLINE ))NEWLINE elif hasattr(value, "to_dict"):NEWLINE result[attr] = value.to_dict()NEWLINE elif isinstance(value, dict):NEWLINE result[attr] = dict(map(NEWLINE lambda item: (item[0], item[1].to_dict())NEWLINE if hasattr(item[1], "to_dict") else item,NEWLINE value.items()NEWLINE ))NEWLINE else:NEWLINE result[attr] = valueNEWLINE if issubclass(CertificateUse, dict):NEWLINE for key, value in self.items():NEWLINE result[key] = valueNEWLINENEWLINE return resultNEWLINENEWLINE def to_str(self):NEWLINE """Returns the string representation of the model"""NEWLINE return pprint.pformat(self.to_dict())NEWLINENEWLINE def __repr__(self):NEWLINE """For `print` and 
`pprint`"""NEWLINE return self.to_str()NEWLINENEWLINE def __eq__(self, other):NEWLINE """Returns true if both objects are equal"""NEWLINE if not isinstance(other, CertificateUse):NEWLINE return FalseNEWLINENEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Returns true if both objects are not equal"""NEWLINE return not self == otherNEWLINE |
#NEWLINE# Autogenerated by Thrift Compiler (0.11.0)NEWLINE#NEWLINE# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOINGNEWLINE#NEWLINE# options string: pyNEWLINE#NEWLINENEWLINEfrom thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationExceptionNEWLINEfrom thrift.protocol.TProtocol import TProtocolExceptionNEWLINEfrom thrift.TRecursive import fix_specNEWLINENEWLINEimport sysNEWLINEimport loggingNEWLINEfrom .ttypes import *NEWLINEfrom thrift.Thrift import TProcessorNEWLINEfrom thrift.transport import TTransportNEWLINENEWLINEall_structs = []NEWLINENEWLINENEWLINEclass Iface(object):NEWLINE def call(self, body):NEWLINE """NEWLINE Parameters:NEWLINE - bodyNEWLINE """NEWLINE passNEWLINENEWLINENEWLINEclass Client(Iface):NEWLINE def __init__(self, iprot, oprot=None):NEWLINE self._iprot = self._oprot = iprotNEWLINE if oprot is not None:NEWLINE self._oprot = oprotNEWLINE self._seqid = 0NEWLINENEWLINE def call(self, body):NEWLINE """NEWLINE Parameters:NEWLINE - bodyNEWLINE """NEWLINE self.send_call(body)NEWLINE return self.recv_call()NEWLINENEWLINE def send_call(self, body):NEWLINE self._oprot.writeMessageBegin("call", TMessageType.CALL, self._seqid)NEWLINE args = call_args()NEWLINE args.body = bodyNEWLINE args.write(self._oprot)NEWLINE self._oprot.writeMessageEnd()NEWLINE self._oprot.trans.flush()NEWLINENEWLINE def recv_call(self):NEWLINE iprot = self._iprotNEWLINE (fname, mtype, rseqid) = iprot.readMessageBegin()NEWLINE if mtype == TMessageType.EXCEPTION:NEWLINE x = TApplicationException()NEWLINE x.read(iprot)NEWLINE iprot.readMessageEnd()NEWLINE raise xNEWLINE result = call_result()NEWLINE result.read(iprot)NEWLINE iprot.readMessageEnd()NEWLINE if result.success is not None:NEWLINE return result.successNEWLINE raise TApplicationException(TApplicationException.MISSING_RESULT, "call failed: unknown result")NEWLINENEWLINENEWLINEclass Processor(Iface, TProcessor):NEWLINE def __init__(self, handler):NEWLINE self._handler = handlerNEWLINE self._processMap = {}NEWLINE self._processMap["call"] = Processor.process_callNEWLINENEWLINE def process(self, iprot, oprot):NEWLINE (name, type, seqid) = iprot.readMessageBegin()NEWLINE if name not in self._processMap:NEWLINE iprot.skip(TType.STRUCT)NEWLINE iprot.readMessageEnd()NEWLINE x = TApplicationException(TApplicationException.UNKNOWN_METHOD, "Unknown function %s" % (name))NEWLINE oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)NEWLINE x.write(oprot)NEWLINE oprot.writeMessageEnd()NEWLINE oprot.trans.flush()NEWLINE returnNEWLINE else:NEWLINE self._processMap[name](self, seqid, iprot, oprot)NEWLINE return TrueNEWLINENEWLINE def process_call(self, seqid, iprot, oprot):NEWLINE args = call_args()NEWLINE args.read(iprot)NEWLINE iprot.readMessageEnd()NEWLINE result = call_result()NEWLINE try:NEWLINE result.success = self._handler.call(args.body)NEWLINE msg_type = TMessageType.REPLYNEWLINE except TTransport.TTransportException:NEWLINE raiseNEWLINE except TApplicationException as ex:NEWLINE logging.exception("TApplication exception in handler")NEWLINE msg_type = TMessageType.EXCEPTIONNEWLINE result = exNEWLINE except Exception:NEWLINE logging.exception("Unexpected exception in handler")NEWLINE msg_type = TMessageType.EXCEPTIONNEWLINE result = TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error")NEWLINE oprot.writeMessageBegin("call", msg_type, seqid)NEWLINE result.write(oprot)NEWLINE oprot.writeMessageEnd()NEWLINE oprot.trans.flush()NEWLINENEWLINENEWLINE# HELPER FUNCTIONS AND 
STRUCTURESNEWLINENEWLINENEWLINEclass call_args(object):NEWLINE """NEWLINE Attributes:NEWLINE - bodyNEWLINE """NEWLINENEWLINE def __init__(self, body=None, ):NEWLINE self.body = bodyNEWLINENEWLINE def read(self, iprot):NEWLINE if iprot._fast_decode is not None and isinstance(iprot.trans,NEWLINE TTransport.CReadableTransport) and self.thrift_spec is not None:NEWLINE iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])NEWLINE returnNEWLINE iprot.readStructBegin()NEWLINE while True:NEWLINE (fname, ftype, fid) = iprot.readFieldBegin()NEWLINE if ftype == TType.STOP:NEWLINE breakNEWLINE if fid == 1:NEWLINE if ftype == TType.STRING:NEWLINE self.body = iprot.readString().decode("utf-8") if sys.version_info[0] == 2 else iprot.readString()NEWLINE else:NEWLINE iprot.skip(ftype)NEWLINE else:NEWLINE iprot.skip(ftype)NEWLINE iprot.readFieldEnd()NEWLINE iprot.readStructEnd()NEWLINENEWLINE def write(self, oprot):NEWLINE if oprot._fast_encode is not None and self.thrift_spec is not None:NEWLINE oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))NEWLINE returnNEWLINE oprot.writeStructBegin("call_args")NEWLINE if self.body is not None:NEWLINE oprot.writeFieldBegin("body", TType.STRING, 1)NEWLINE oprot.writeString(self.body.encode("utf-8") if sys.version_info[0] == 2 else self.body)NEWLINE oprot.writeFieldEnd()NEWLINE oprot.writeFieldStop()NEWLINE oprot.writeStructEnd()NEWLINENEWLINE def validate(self):NEWLINE returnNEWLINENEWLINE def __repr__(self):NEWLINE L = ["%s=%r" % (key, value)NEWLINE for key, value in self.__dict__.items()]NEWLINE return "%s(%s)" % (self.__class__.__name__, ", ".join(L))NEWLINENEWLINE def __eq__(self, other):NEWLINE return isinstance(other, self.__class__) and self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE return not (self == other)NEWLINENEWLINENEWLINEall_structs.append(call_args)NEWLINEcall_args.thrift_spec = (NEWLINE None, # 0NEWLINE (1, TType.STRING, "body", "UTF8", None,), # 1NEWLINE)NEWLINENEWLINENEWLINEclass call_result(object):NEWLINE """NEWLINE Attributes:NEWLINE - successNEWLINE """NEWLINENEWLINE def __init__(self, success=None, ):NEWLINE self.success = successNEWLINENEWLINE def read(self, iprot):NEWLINE if iprot._fast_decode is not None and isinstance(iprot.trans,NEWLINE TTransport.CReadableTransport) and self.thrift_spec is not None:NEWLINE iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])NEWLINE returnNEWLINE iprot.readStructBegin()NEWLINE while True:NEWLINE (fname, ftype, fid) = iprot.readFieldBegin()NEWLINE if ftype == TType.STOP:NEWLINE breakNEWLINE if fid == 0:NEWLINE if ftype == TType.STRING:NEWLINE self.success = iprot.readString().decode("utf-8") if sys.version_info[NEWLINE 0] == 2 else iprot.readString()NEWLINE else:NEWLINE iprot.skip(ftype)NEWLINE else:NEWLINE iprot.skip(ftype)NEWLINE iprot.readFieldEnd()NEWLINE iprot.readStructEnd()NEWLINENEWLINE def write(self, oprot):NEWLINE if oprot._fast_encode is not None and self.thrift_spec is not None:NEWLINE oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))NEWLINE returnNEWLINE oprot.writeStructBegin("call_result")NEWLINE if self.success is not None:NEWLINE oprot.writeFieldBegin("success", TType.STRING, 0)NEWLINE oprot.writeString(self.success.encode("utf-8") if sys.version_info[0] == 2 else self.success)NEWLINE oprot.writeFieldEnd()NEWLINE oprot.writeFieldStop()NEWLINE oprot.writeStructEnd()NEWLINENEWLINE def validate(self):NEWLINE returnNEWLINENEWLINE def __repr__(self):NEWLINE L = ["%s=%r" % (key, 
value)NEWLINE for key, value in self.__dict__.items()]NEWLINE return "%s(%s)" % (self.__class__.__name__, ", ".join(L))NEWLINENEWLINE def __eq__(self, other):NEWLINE return isinstance(other, self.__class__) and self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE return not (self == other)NEWLINENEWLINENEWLINEall_structs.append(call_result)NEWLINEcall_result.thrift_spec = (NEWLINE (0, TType.STRING, "success", "UTF8", None,), # 0NEWLINE)NEWLINEfix_spec(all_structs)NEWLINEdel all_structsNEWLINE |
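A hedged sketch of how this generated Client is typically wired up with a socket transport and binary protocol. The host, port, and payload are placeholders; only the `Client` class and its string-typed `call(body)` method come from the generated code above:

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol

# Placeholder endpoint; the generated Client only needs a protocol object.
transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Client(protocol)

transport.open()
try:
    # 'body' is a plain string field per the thrift_spec above.
    print(client.call('{"ping": true}'))
finally:
    transport.close()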
#!/usr/bin/env pythonNEWLINENEWLINEimport numpy as npNEWLINEimport roslibNEWLINEimport rospyNEWLINEimport mathNEWLINEimport tfNEWLINEimport timeNEWLINEfrom nav_msgs.msg import OdometryNEWLINEfrom asv_msgs.msg import RobotGoal, SonarData, SonarDataListNEWLINEfrom asv_msgs.srv import SetRobotPath, SetRobotPathResponseNEWLINEfrom std_srvs.srv import SetBool, SetBoolResponseNEWLINEfrom geometry_msgs.msg import PoseNEWLINEfrom visualization_msgs.msg import Marker, MarkerArrayNEWLINENEWLINEclass OBS_AVOIDANCE():NEWLINE def __init__(self):NEWLINE self.node_name = rospy.get_name()NEWLINE self.dis_threshold = 2.0NEWLINE self.goal = NoneNEWLINE self.robot_pose = Pose()NEWLINE self.robot_orig = []NEWLINE self.r_goal = None # relative goal positionNEWLINE self.get_goal = FalseNEWLINE self.get_odom = FalseNEWLINE rospy.loginfo("[%s] Initializing " %(self.node_name))NEWLINENEWLINE self.pub_sonar_marker = rospy.Publisher('sonar_marker', MarkerArray, queue_size=1)NEWLINE self.pub_new_goal_marker = rospy.Publisher("new_goal_marker",Marker, queue_size=1)NEWLINENEWLINE self.pub_robot_goal = rospy.Publisher("robot_goal/obs", RobotGoal, queue_size = 1)NEWLINE rospy.Subscriber("robot_goal", RobotGoal, self.goal_cb, queue_size = 1, buff_size = 2**24)NEWLINE rospy.Subscriber("sonar", SonarDataList, self.sonar_cb, queue_size = 1, buff_size = 2**24)NEWLINE rospy.Subscriber('odometry', Odometry, self.odom_cb, queue_size = 1, buff_size = 2**24)NEWLINE NEWLINE def odom_cb(self, msg):NEWLINE self.get_odom = TrueNEWLINE robot_pose = Pose()NEWLINE robot_pose.position.x = msg.pose.pose.position.xNEWLINE robot_pose.position.y = msg.pose.pose.position.yNEWLINE quat = (msg.pose.pose.orientation.x,\NEWLINE msg.pose.pose.orientation.y,\NEWLINE msg.pose.pose.orientation.z,\NEWLINE msg.pose.pose.orientation.w)NEWLINENEWLINE self.robot_pose = robot_poseNEWLINENEWLINE def sonar_cb(self, msg):NEWLINE # 0 : downNEWLINE # 1 : leftNEWLINE # 2 : frontNEWLINE # 3 : rightNEWLINENEWLINE if len(msg.list) != 4 or self.get_odom == False:NEWLINE returnNEWLINENEWLINE marker_array = MarkerArray()NEWLINE for i in range(1, 4):NEWLINE marker = Marker()NEWLINE marker.header.frame_id = "map"NEWLINE marker.id = iNEWLINE marker.header.stamp = rospy.Time.now()NEWLINE marker.type = Marker.CUBENEWLINE marker.action = Marker.ADDNEWLINE marker.lifetime = rospy.Duration(0.5)NEWLINE marker.pose.position.x = self.robot_pose.position.xNEWLINE marker.pose.position.y = self.robot_pose.position.yNEWLINE marker.pose.position.z = self.robot_pose.position.zNEWLINE if i == 1:NEWLINE marker.pose.position.y = self.robot_pose.position.y + msg.list[i].distance/1000.NEWLINE marker.color.r = 1NEWLINE marker.color.g = 0NEWLINE marker.color.b = 0NEWLINE elif i == 2:NEWLINE marker.pose.position.x = self.robot_pose.position.x + msg.list[i].distance/1000.NEWLINE marker.color.r = 0NEWLINE marker.color.g = 1NEWLINE marker.color.b = 0NEWLINE elif i == 3:NEWLINE marker.pose.position.y = self.robot_pose.position.y - msg.list[i].distance/1000.NEWLINE marker.color.r = 0NEWLINE marker.color.g = 0NEWLINE marker.color.b = 1NEWLINE marker.pose.orientation.x = 0.0NEWLINE marker.pose.orientation.y = 0.0NEWLINE marker.pose.orientation.z = 0.0NEWLINE marker.pose.orientation.w = 1.0NEWLINE marker.scale.x = 0.3NEWLINE marker.scale.y = 0.3NEWLINE marker.scale.z = 0.3NEWLINE marker.color.a = 1NEWLINE marker_array.markers.append(marker)NEWLINE self.pub_sonar_marker.publish(marker_array)NEWLINENEWLINE '''new_goal = []NEWLINE new_goal = self.r_goal[:]NEWLINENEWLINE left_safe_dis = 
msg.list[1].distance - self.dis_thresholdNEWLINE        front_safe_dis = msg.list[2].distance - self.dis_thresholdNEWLINE        right_safe_dis = msg.list[3].distance - self.dis_thresholdNEWLINENEWLINE        if front_safe_dis < new_goal[0]:NEWLINE            new_goal[0] = front_safe_disNEWLINENEWLINE        if right_safe_dis < new_goal[1]:NEWLINE            if left_safe_dis < - new_goal[1]:NEWLINE                new_goal[1] = (right_safe_dis - left_safe_dis)/2.NEWLINE            else:NEWLINE                new_goal[1] = right_safe_disNEWLINE        elif left_safe_dis < - new_goal[1]:NEWLINE            new_goal[1] = left_safe_disNEWLINENEWLINE        rg = RobotGoal()NEWLINE        rg.goal.position.x = new_goal[0] + self.robot_pose.position.xNEWLINE        rg.goal.position.y = new_goal[1] + self.robot_pose.position.yNEWLINE        rg.robot = self.robot_poseNEWLINE        self.pub_robot_goal.publish(rg)'''NEWLINENEWLINE    def goal_cb(self, msg):NEWLINE        self.get_goal = TrueNEWLINE        self.goal = [msg.goal.position.x, msg.goal.position.y]NEWLINE        self.robot_position = [msg.robot.position.x, msg.robot.position.y]NEWLINE        self.robot_pose = msg.robotNEWLINE        quat = (msg.robot.orientation.x,\NEWLINE                msg.robot.orientation.y,\NEWLINE                msg.robot.orientation.z,\NEWLINE                msg.robot.orientation.w)NEWLINE        _, _, yaw = tf.transformations.euler_from_quaternion(quat)NEWLINENEWLINE        if len(self.goal) == 0 or len(self.robot_position) == 0: # if the robot hasn't received any goal yetNEWLINE            returnNEWLINENEWLINE        self.r_goal = [self.goal[0] - self.robot_position[0], self.goal[1] - self.robot_position[1]]NEWLINENEWLINE    def get_distance(self, p1, p2):NEWLINE        return math.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)NEWLINENEWLINEif __name__ == '__main__':NEWLINE    rospy.init_node('OBS_AVOIDANCE')NEWLINE    foo = OBS_AVOIDANCE()NEWLINE    rospy.spin() |
# SPDX-License-Identifier: MITNEWLINENEWLINE""":class:`~calingen.interfaces.plugin_api.LayoutProvider` implementation for a simple event list.NEWLINENEWLINEWarningsNEWLINE--------NEWLINEThis layout is included in **django-calingen**'s CI test setup, mainly to verifyNEWLINEthat the :func:`TeX escaping <calingen.templatetags.calingen_escape.escape_tex>`NEWLINEis working.NEWLINENEWLINEThis may be subject to future changes.NEWLINE"""NEWLINENEWLINE# app importsNEWLINEfrom calingen.interfaces.plugin_api import LayoutProviderNEWLINENEWLINENEWLINEclass SimpleEventList(LayoutProvider):NEWLINE    """The actual implementation of the layout.NEWLINENEWLINE    Nothing fancy here,NEWLINE    :meth:`~calingen.contrib.layouts.simple_event_list.simple_event_list.SimpleEventList.prepare_context`NEWLINE    is just used to group the providedNEWLINE    :class:`calingen.interfaces.data_exchange.CalendarEntryList` by month.NEWLINENEWLINE    WarningsNEWLINE    --------NEWLINE    The provided templates create a document targeted at German users. You mayNEWLINE    override the templates to (fully) support other languages.NEWLINENEWLINE    NotesNEWLINE    -----NEWLINE    To customize the generated TeX-sources, the following templates may beNEWLINE    overridden:NEWLINENEWLINE    - ``simple_event_list/tex/base.tex``: Speaking in TeX-terms: the preamble ofNEWLINE      the document, including package definitions.NEWLINE    - ``simple_event_list/tex/simple_event_list.tex``: Speaking in TeX-terms:NEWLINE      the document's body.NEWLINE    - ``simple_event_list/tex/single_entry_line.tex``: The actualNEWLINE      TeX-representation of aNEWLINE      :class:`calingen.interfaces.data_exchange.CalendarEntry`.NEWLINE    """NEWLINENEWLINE    name = "Simple Event List"NEWLINE    paper_size = "a4"NEWLINE    orientation = "portrait"NEWLINE    layout_type = "tex"NEWLINE    _template = "simple_event_list/tex/simple_event_list.tex"NEWLINENEWLINE    @classmethodNEWLINE    def prepare_context(cls, context):NEWLINE        """Pre-process the ``entries`` to group them by month."""NEWLINE        entries = context.pop("entries", [])NEWLINENEWLINE        # put each month's entries in a dedicated listNEWLINE        processed_entries = []NEWLINE        try:NEWLINE            loop_month = entries[0].timestamp.monthNEWLINE        except IndexError:NEWLINE            loop_month = "No Entries"NEWLINENEWLINE        month_list = []NEWLINE        for entry in entries:NEWLINE            if entry.timestamp.month != loop_month:NEWLINE                processed_entries.append(month_list)NEWLINE                month_list = []NEWLINE                loop_month = entry.timestamp.monthNEWLINE            month_list.append(entry)NEWLINE        if month_list:NEWLINE            processed_entries.append(month_list)NEWLINENEWLINE        context["entries"] = processed_entriesNEWLINE        return contextNEWLINE |
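To see the month-grouping in isolation, a small sketch with stand-in entry objects; SimpleNamespace stands in for CalendarEntry, which only needs a timestamp attribute here:

from datetime import date
from types import SimpleNamespace

# Three entries across two months; prepare_context groups them per month.
entries = [
    SimpleNamespace(timestamp=date(2022, 1, 5)),
    SimpleNamespace(timestamp=date(2022, 1, 20)),
    SimpleNamespace(timestamp=date(2022, 3, 1)),
]
ctx = SimpleEventList.prepare_context({"entries": entries})
assert [len(month) for month in ctx["entries"]] == [2, 1]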
"""NEWLINEProcess missing data within a dataset. NEWLINE"""NEWLINEimport missingno as msnoNEWLINEfrom pandas import DataFrameNEWLINEfrom pandas import SeriesNEWLINEfrom typing import ListNEWLINENEWLINENEWLINEdef visualize(df):NEWLINE """Plot missing cells heatmap"""NEWLINE msno.matrix(df)NEWLINENEWLINENEWLINEdef removeRows(df: DataFrame) -> DataFrame:NEWLINE """Removes all rows with NaN cells"""NEWLINE return(df.dropna().reset_index(drop=True))NEWLINENEWLINENEWLINEdef removeRowsByCol(df: DataFrame, col: str) -> DataFrame:NEWLINE """Removes all rows with missing cells in specified column"""NEWLINE return(df[~df[col].isna()].reset_index(drop=True))NEWLINENEWLINENEWLINEdef impute(df: DataFrame, col: str, strategy: str = "zero") -> DataFrame:NEWLINE """NEWLINE Impute missing data in column.NEWLINE df - data dataframeNEWLINE col - target column labelNEWLINE strategy - imputation strategyNEWLINE zero: replaces NA with 0NEWLINE mean: replaces NA with the meanNEWLINE median: replaces NA with the medianNEWLINE most frequent: replaces NA with one the modeNEWLINE empty: replaces NA with an empty str i.e. ""NEWLINE hot deck: replaces NA with a random sample of non-NA dataNEWLINE """NEWLINE data = df.copy()NEWLINENEWLINE if strategy == "zero":NEWLINE # works only with quant dataNEWLINE filler_data = 0NEWLINE elif strategy == "mean":NEWLINE # works only with quant dataNEWLINE filler_data = data[col].mean()NEWLINE elif strategy == "median":NEWLINE # works only with quant dataNEWLINE filler_data = data[col].median()NEWLINE elif strategy == "most frequent":NEWLINE filler_data = data[col].mode().sample()NEWLINE elif strategy == "empty":NEWLINE filler_data = ""NEWLINE elif strategy == "hot deck":NEWLINE # replaces NaNs with random samples from the valid data pool.NEWLINE # The sampling is with replacement incase the valid sampleNEWLINE # size is too smallNEWLINE valid_data = data[col][~data[col].isnull()]NEWLINE sample_len = len(data[col][data[col].isnull()])NEWLINE filler_data = valid_data.sample(sample_len, replace=True).valuesNEWLINE else:NEWLINE raise Exception("Not a valid impute strategy")NEWLINE data[col][data[col].isnull()] = filler_dataNEWLINE return(data)NEWLINENEWLINENEWLINEdef generateBinaries(df:DataFrame, cols: List[str]) -> DataFrame:NEWLINE """Add binary variables to specify whether cell is na"""NEWLINE data = df.copy()NEWLINE for col in cols:NEWLINE data[col+"_na"] = ~data[col].isnull()NEWLINE return(data)NEWLINENEWLINENEWLINEdef noMissingByCol(df: DataFrame) -> Series:NEWLINE """Count the number of missing cells in each column"""NEWLINE return(df.isna().sum())NEWLINENEWLINENEWLINEdef replaceDefects(df: DataFrame, col: str, replacement_pairs: dict) -> DataFrame:NEWLINE """Row replacement for str based columns"""NEWLINE data = df.copy()NEWLINE for key, item in replacement_pairs.items():NEWLINE data[col] = data[col].apply(lambda x: x.replace(key, item))NEWLINE return(data)NEWLINE |
#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE#     http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE#NEWLINENEWLINEimport pprintNEWLINENEWLINEfrom pystachio.naming import frozendictNEWLINEfrom twitter.common import appNEWLINENEWLINEfrom apache.thermos.cli.common import get_task_from_optionsNEWLINEfrom apache.thermos.common.options import add_binding_to, add_port_toNEWLINENEWLINENEWLINEdef inspect_unwrap(obj):NEWLINE  if isinstance(obj, frozendict):NEWLINE    return dict((key, inspect_unwrap(val)) for (key, val) in obj.items())NEWLINE  if isinstance(obj, (list, tuple, set)):NEWLINE    return tuple(inspect_unwrap(val) for val in obj)NEWLINE  return objNEWLINENEWLINENEWLINE@app.commandNEWLINE@app.command_option("--task", metavar="TASKNAME", default=None, dest='task',NEWLINE                    help="The thermos task within the config that should be inspected. Only "NEWLINE                         "required if there are multiple tasks exported from the thermos "NEWLINE                         "configuration.")NEWLINE@app.command_option("--json", default=False, action='store_true', dest='json',NEWLINE                    help="Read the source file in json format instead of pystachio.")NEWLINE@app.command_option("-P", "--port", type="string", nargs=1, action="callback",NEWLINE                    callback=add_port_to('prebound_ports'), dest="prebound_ports", default=[],NEWLINE                    metavar="NAME:PORT", help="bind named PORT to NAME.")NEWLINE@app.command_option("-E", "--environment", type="string", nargs=1, action="callback",NEWLINE                    callback=add_binding_to('bindings'), default=[], dest="bindings",NEWLINE                    metavar="NAME=VALUE",NEWLINE                    help="bind the configuration environment variable NAME to VALUE.")NEWLINEdef inspect(args, options):NEWLINE  """Inspect a thermos config and display the evaluated taskNEWLINENEWLINE  Usage: thermos inspect [options] configNEWLINE  """NEWLINE  thermos_task = get_task_from_options(args, options)NEWLINE  ti, _ = thermos_task.task().interpolate()NEWLINE  pprint.pprint(inspect_unwrap(ti.get()), indent=4)NEWLINE |
#!/usr/bin/env python3NEWLINE# -*- coding:utf-8 -*-NEWLINE"""NEWLINEreceive_logs_direct.py: published log messages are selectively delivered to the bound receiversNEWLINE"""NEWLINE# Pika is a pure-Python implementation of the AMQP 0-9-1 protocolNEWLINEimport pikaNEWLINEimport sysNEWLINENEWLINENEWLINE# guest user can only connect via localhostNEWLINE#credentials = pika.PlainCredentials('guest', 'guest')NEWLINEcredentials = pika.PlainCredentials('pi', 'macintosh')NEWLINEconnection = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.31.156',NEWLINE                                                               port=5672,NEWLINE                                                               virtual_host='/',NEWLINE                                                               credentials=credentials))NEWLINEchannel = connection.channel()NEWLINE# declare the exchangeNEWLINEchannel.exchange_declare(exchange='direct_logs',NEWLINE                         exchange_type='direct')NEWLINENEWLINEresult = channel.queue_declare(exclusive=True)NEWLINEqueue_name = result.method.queueNEWLINENEWLINEseverities = sys.argv[1:]NEWLINENEWLINEif not severities:NEWLINE    sys.stderr.write("Usage: %s [info] [warning] [error]\n" % sys.argv[0])NEWLINE    sys.exit(1)NEWLINENEWLINEfor severity in severities:NEWLINE    channel.queue_bind(exchange='direct_logs',NEWLINE                       queue=queue_name,NEWLINE                       routing_key=severity)NEWLINENEWLINENEWLINEprint(" [*] Waiting for logs. To exit press CTRL+C")NEWLINENEWLINENEWLINEdef callback(ch, method, properties, body):NEWLINE    print(" [x] %r:%r" % (method.routing_key, body))NEWLINENEWLINENEWLINEchannel.basic_consume(callback,NEWLINE                      queue=queue_name,NEWLINE                      no_ack=True)NEWLINENEWLINEchannel.start_consuming()NEWLINE"""NEWLINEPlease keep in mind that this and other tutorials are, well, tutorials. They demonstrate one new concept at a time and mayNEWLINEintentionally oversimplify some things and leave out others. For example, topics such as connection management, error handling,NEWLINEconnection recovery, concurrency and metric collection are largely omitted for the sake of brevity. Such simplified codeNEWLINEshould not be considered production ready.NEWLINENEWLINE""" |
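For completeness, a sketch of the matching publisher from the same tutorial pattern; the broker address and credentials simply mirror the consumer above:

# emit_log_direct.py (sketch): publish one message under a severity routing key.
import pika
import sys

credentials = pika.PlainCredentials('pi', 'macintosh')
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='192.168.31.156', port=5672, virtual_host='/', credentials=credentials))
channel = connection.channel()
channel.exchange_declare(exchange='direct_logs', exchange_type='direct')

severity = sys.argv[1] if len(sys.argv) > 1 else 'info'
message = ' '.join(sys.argv[2:]) or 'Hello World!'
channel.basic_publish(exchange='direct_logs', routing_key=severity, body=message)
print(" [x] Sent %r:%r" % (severity, message))
connection.close()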
import disnakeNEWLINEfrom replit import dbNEWLINEfrom disnake.ext import commandsNEWLINEimport randomNEWLINEimport asyncioNEWLINEimport requestsNEWLINEimport jsonNEWLINEimport timeNEWLINEimport osNEWLINEapikey = os.environ['perapi']NEWLINENEWLINEfrom pyspective import pyspectiveNEWLINENEWLINEperspective = pyspective.PyspectiveAPI(apikey)NEWLINENEWLINEcashE = '<:YeetCoin:899166414546559056>'NEWLINENEWLINEclass SlashCommands(commands.Cog, name='Slash commands'):NEWLINE    '''These are the slash commands'''NEWLINE    def __init__(self, bot):NEWLINE        self.bot = botNEWLINE    NEWLINE    @commands.slash_command(name='nsfw',description="It's not")NEWLINE    async def nsfw(self, inter):NEWLINE        await inter.response.send_message('https://tenor.com/view/rick-astly-rick-rolled-gif-22755440')NEWLINENEWLINE    #economyNEWLINE    global cashENEWLINENEWLINE    @commands.slash_command(name='cash',description='Your cash')NEWLINE    async def cash(self, inter,user:disnake.Member=None):NEWLINE        if user is None:NEWLINE            try:NEWLINE                value = db[f'{inter.author.id}']NEWLINE                await inter.response.send_message(f'You currently have {value} {cashE}')NEWLINE            except KeyError:NEWLINE                value = db[inter.author.id]='0'NEWLINE                await inter.response.send_message(f'You currently have {value} {cashE}')NEWLINE        else:NEWLINE            try:NEWLINE                value = db[f'{user.id}']NEWLINE                await inter.response.send_message(f'{user.mention} currently has {value} {cashE}')NEWLINE            except KeyError:NEWLINE                value = db[f'{user.id}']='0'NEWLINE                await inter.response.send_message(f'{user.mention} currently has {value} {cashE}')NEWLINENEWLINE    NEWLINE    @commands.slash_command(name='work',description='Work to get more coins')NEWLINE    @commands.cooldown(rate=1, per=600)NEWLINE    async def work(self, inter):NEWLINE        e = random.randint(-250,250)NEWLINE        try:NEWLINE            value = int(db[f'{inter.author.id}'])NEWLINE            value += eNEWLINE            db[f'{inter.author.id}'] = f'{value}'NEWLINE            if e<0:NEWLINE                await inter.response.send_message(f'You messed things up! You spent {-e}{cashE} to make things right.')NEWLINE            elif e>=0 and e<=50:NEWLINE                await inter.response.send_message(f"What a lazy guy. You didn't work enough. That is why you only get {e}{cashE}.")NEWLINE            else:NEWLINE                await inter.response.send_message(f'You did a great job. You get {e}{cashE} for that.')NEWLINE        except KeyError:NEWLINE            db[inter.author.id]=f'{e}'NEWLINE            if e<0:NEWLINE                await inter.response.send_message(f'You messed things up! You spent {-e}{cashE} to make things right.')NEWLINE            elif e>=0 and e<=50:NEWLINE                await inter.response.send_message(f"What a lazy guy. You didn't work enough. That is why you only get {e}{cashE}.")NEWLINE            else:NEWLINE                await inter.response.send_message(f'You did a great job. You get {e}{cashE} for that.')NEWLINENEWLINE    NEWLINE    @commands.slash_command(name='transfer',description='Give someone your cash with a little tax')NEWLINE    async def give(self, inter,user:disnake.User,cash:int):NEWLINE        try:NEWLINE            value1 = int(db[f'{inter.author.id}'])NEWLINE            value2 = int(db[f'{user.id}'])NEWLINE            if value1 > cash:NEWLINE                e=cash/100*80NEWLINE                value1 -= cashNEWLINE                db[f'{inter.author.id}'] = f'{value1}'NEWLINE                value2 += eNEWLINE                db[f'{user.id}'] = f'{value2}'NEWLINE                await inter.response.send_message(f'You gave {e} to {user.mention} after 20% tax. 
Now, you have {value1} and they got {value2}.')NEWLINE            else:NEWLINE                await inter.response.send_message("You don't have enough cash to do it.")NEWLINE        except KeyError:NEWLINE            db[f'{user.id}'] = '0'NEWLINE            value1 = int(db[f'{inter.author.id}'])NEWLINE            value2 = int(db[f'{user.id}'])NEWLINE            if value1 > cash:NEWLINE                e=cash/100*80NEWLINE                value1 -= cashNEWLINE                db[f'{inter.author.id}'] = f'{value1}'NEWLINE                value2 += eNEWLINE                db[f'{user.id}'] = f'{value2}'NEWLINE                await inter.response.send_message(f'You gave {e} to {user.mention} after 20% tax. Now, you have {value1} and they got {value2}.')NEWLINENEWLINE            else:NEWLINE                await inter.response.send_message("You don't have enough cash to do it.")NEWLINENEWLINE    NEWLINE    @commands.slash_command(name='test')NEWLINE    async def test(self, inter):NEWLINE        if inter.author.id == 832264231617167381 or inter.author.id == 543656290468102174:NEWLINE            E = db[f'{inter.author.id}']NEWLINE            e = int(E)NEWLINE            e += 50000NEWLINE            db[f'{inter.author.id}'] = f'{e}'NEWLINE            await inter.response.send_message('Dev powah >:)')NEWLINENEWLINE    NEWLINE    @commands.slash_command(name='clear')NEWLINE    async def clear(self, inter,user:disnake.User):NEWLINE        if inter.author.id == 832264231617167381 or inter.author.id == 543656290468102174:NEWLINE            # clear the target user's balance, not the invoking dev'sNEWLINE            db[f'{user.id}'] = '0'NEWLINE            await inter.response.send_message('Dev powah >>:)')NEWLINENEWLINENEWLINE    @commands.slash_command(name='leaderboard',description='Show the top 20 richest users')NEWLINE    async def lb(self, inter):NEWLINE        e = {}NEWLINE        high = {}NEWLINE        for x in inter.guild.members:NEWLINE            try:NEWLINE                e.update({x.name: int(db[str(x.id)])})NEWLINE            except KeyError:NEWLINE                db[f"{x.id}"]='0'NEWLINE                e.update({x.name: 0})NEWLINE        high=dict(sorted(e.items(),key= lambda x:x[1], reverse = True))NEWLINE        text = ''NEWLINE        e = 0NEWLINE        for x in high:NEWLINE            if e == 20:NEWLINE                # stop collecting after 20 entries but still send the embed belowNEWLINE                breakNEWLINE            else:NEWLINE                text += f'{x}: {high[x]}\n'NEWLINE                e+=1NEWLINE        embed = disnake.Embed(title=f'Top highest in {inter.guild.name}',description=text,color=0x6ba4ff)NEWLINE        await inter.response.send_message(embed=embed)NEWLINENEWLINE    #GiveawayNEWLINENEWLINE    @commands.slash_command(name='create_giveaway')NEWLINE    @commands.has_permissions(manage_guild=True)NEWLINE    async def cgw(self, inter,times:int,winners,*,prize):NEWLINE        eh = time.time()NEWLINE        x=0NEWLINE        for x in range(0,times):NEWLINE            eh+=1NEWLINE            x+=1NEWLINE        eh=int(eh)NEWLINE        print(x)NEWLINE        embed=disnake.Embed()NEWLINE        embed.add_field(name=prize,value=f'React with 🎉 to enter!\nTime: <t:{eh}:R>\nHosted by: {inter.author.mention}')NEWLINE        embed.set_footer(text = f'{winners} winner(s)')NEWLINE        await inter.response.send_message(embed=embed)NEWLINE        # interaction responses return None; fetch the sent message to react to itNEWLINE        gwlink = await inter.original_message()NEWLINE        await gwlink.add_reaction('🎉')NEWLINE        await asyncio.sleep(x)NEWLINE        # re-fetch so reactions collected during the sleep are visibleNEWLINE        gwlink = await gwlink.channel.fetch_message(gwlink.id)NEWLINE        for s in gwlink.reactions:NEWLINE            if str(s.emoji) == '🎉':NEWLINE                users = await s.users().flatten()NEWLINE                winner = random.choice(users)NEWLINE                await gwlink.channel.send(f'{winner.mention} has won the raffle.')NEWLINENEWLINE    #Mod NEWLINENEWLINE    @commands.slash_command(name='ban',description='Ban user')NEWLINE    @commands.has_permissions(ban_members=True)NEWLINE    async def ban(self, inter, user: disnake.Member, *, reason=None):NEWLINENEWLINE        await inter.response.send_message(f'{user.mention} was banned. 
Reason: {reason}')NEWLINE        await inter.guild.ban(user, reason=reason)NEWLINENEWLINE    @commands.slash_command(name='kick',description='Kick user')NEWLINE    @commands.has_permissions(kick_members=True)NEWLINE    async def kick(self, inter, user: disnake.Member, *, reason=None):NEWLINENEWLINE        # DM first; after the kick the bot may no longer share a guild with the userNEWLINE        await user.send(f'You got kicked from {user.guild.name}. Reason: {reason}')NEWLINE        await inter.guild.kick(user, reason=reason)NEWLINE        await inter.response.send_message(f'{user.mention} was kicked. Reason: {reason}')NEWLINENEWLINE    @commands.slash_command(name='ban_list',description='Get the banned users list')NEWLINE    @commands.has_permissions(ban_members=True)NEWLINE    async def banList(self, inter):NEWLINENEWLINE        embed = disnake.Embed(title=f'Banned users in {inter.guild.name}')NEWLINE        bans = await inter.guild.bans()NEWLINE        for x in bans:NEWLINE            embed.add_field(NEWLINE                name=NEWLINE                f'User {x.user.name}#{x.user.discriminator} with ID: {x.user.id}',NEWLINE                value=f'Reason: {x.reason}')NEWLINE        await inter.author.send(embed=embed)NEWLINE        await inter.response.send_message('Sent. Check your DM')NEWLINENEWLINE    @commands.slash_command(name='unban',description='Unban user')NEWLINE    @commands.has_permissions(ban_members=True)NEWLINE    async def unban(self, inter, *, userid):NEWLINE        userid = int(userid)NEWLINE        user = await self.bot.fetch_user(userid)NEWLINE        await inter.guild.unban(user)NEWLINE        await inter.response.send_message(f'{user.mention} is unbanned!')NEWLINENEWLINE    @commands.slash_command(name='nuke', description='Clone and delete a channel')NEWLINE    @commands.has_permissions(manage_channels=True)NEWLINE    async def nuke(self, inter):NEWLINE        m = inter.channel.positionNEWLINE        e = await inter.channel.clone()NEWLINE        await inter.channel.delete()NEWLINE        await e.edit(position=m)NEWLINE        await e.send(f'{inter.author.mention} nuked the channel')NEWLINENEWLINE    @commands.slash_command(name='check_toxicity',description='Check the toxicity of a word/sentence')NEWLINE    async def ct(self, inter, *, other):NEWLINE        scores = perspective.score(other)NEWLINE        My_Attribute = scores["TOXICITY"]NEWLINE        await inter.response.send_message(NEWLINE            f"Toxicity test for {other} completed.\nIts toxicity is {My_Attribute.score*100}"NEWLINE        )NEWLINENEWLINE    @commands.slash_command(name='mute', description='Mute user')NEWLINE    @commands.has_permissions(manage_messages=True)NEWLINE    async def mute(self, inter, user: disnake.Member, *, reason=None):NEWLINENEWLINE        overwrite = disnake.PermissionOverwrite()NEWLINE        overwrite.send_messages = FalseNEWLINE        overwrite.read_messages = TrueNEWLINE        breh = disnake.utils.get(inter.guild.roles, name="Muted by YAIS")NEWLINE        if breh is None:NEWLINE            # create the role and keep a reference so it can actually be assignedNEWLINE            breh = await inter.guild.create_role(name="Muted by YAIS")NEWLINE        await user.add_roles(breh)NEWLINE        for x in inter.guild.text_channels:NEWLINE            await x.set_permissions(breh, overwrite=overwrite)NEWLINE        await inter.response.send_message(f'User {user} has been muted. 
Reason: {reason}')NEWLINENEWLINE    @commands.slash_command(name='unmute',description='Unmute user')NEWLINE    @commands.has_permissions(manage_messages=True)NEWLINE    async def unmute(self, inter, user: disnake.Member, *, reason=None):NEWLINENEWLINE        breh = disnake.utils.get(inter.guild.roles, name="Muted by YAIS")NEWLINE        if breh is None:NEWLINE            # without the mute role there is nothing to liftNEWLINE            await inter.response.send_message('There is no "Muted by YAIS" role on this server.')NEWLINE        else:NEWLINE            await user.remove_roles(breh)NEWLINE            await inter.response.send_message(f'User {user} has been unmuted. Reason: {reason}')NEWLINENEWLINE    @commands.slash_command(name='purge', description='Delete a number of messages')NEWLINE    @commands.has_permissions(manage_messages=True)NEWLINE    async def purge(self, inter, count: int):NEWLINENEWLINE        # slash commands leave no invoking message behind, so purge exactly `count`NEWLINE        deleted = await inter.channel.purge(limit=count)NEWLINE        await inter.response.send_message(f'Deleted {len(deleted)} message(s)', delete_after=3)NEWLINENEWLINE    @commands.slash_command(name='role',description='Give/remove a role from a user')NEWLINE    @commands.has_permissions(manage_roles=True)NEWLINE    async def role(self, inter, user: disnake.Member, role: disnake.Role):NEWLINENEWLINE        if role in user.roles:NEWLINE            await user.remove_roles(role)NEWLINE            await inter.response.send_message(NEWLINE                f'Successfully removed {user.mention} {role.mention}')NEWLINE        else:NEWLINE            await user.add_roles(role)NEWLINE            await inter.response.send_message(f'Successfully added {user.mention} {role.mention}')NEWLINENEWLINE    @commands.slash_command(name='is_scammer',description='Check if a user is a scammer. 
Not always true')NEWLINE    async def isScammer(self, inter, user: disnake.User):NEWLINE        r = requests.get(NEWLINE            f"https://discordscammers.com/api/v1/search/{user.id}",NEWLINE            verify=False)NEWLINE        response = r.json()NEWLINE        print(response['status'])NEWLINE        if response['status'] == 'not_found':NEWLINE            await inter.response.send_message('That user **might** not be a scammer.')NEWLINE        else:NEWLINE            await inter.response.send_message('That user is a scammer.')NEWLINENEWLINE    @commands.slash_command(name='report_scammer',description='Report scammer')NEWLINE    async def reportScammer(self, inter, user: disnake.User, *, info):NEWLINE        data = {NEWLINE            'ScammerID': f"{user.id}",NEWLINE            'ScammerUsername': f"{user.name}",NEWLINE            'AdditionalInfo': infoNEWLINE        }NEWLINE        postME = json.dumps(data)NEWLINE        requests.post('https://discordscammers.com/api/v1/report',NEWLINE                      data=postME,NEWLINE                      verify=False)NEWLINE        await inter.response.send_message('Reported!')NEWLINENEWLINE    #SuggestNEWLINENEWLINE    @commands.slash_command(name='suggest', description='Suggest an idea')NEWLINE    async def suggest(self, inter,*,idea):NEWLINE        embedVar = disnake.Embed(title=f"Suggest from user with ID: {inter.author.id}", description=f'{idea}', color=0x6FB9FF)NEWLINE        with open('cogs/isban.txt')as file:NEWLINE            for isBanned in file:NEWLINE                isBanned = int(isBanned)NEWLINE                if inter.author.id != isBanned:NEWLINE                    with open('cogs/channel.txt')as f:NEWLINE                        for hey in f:NEWLINE                            hey=int(hey)NEWLINE                            channel = inter.guild.get_channel(hey)NEWLINE                            if channel is not None:NEWLINE                                hmm = await channel.send(content=str(inter.author.id),embed=embedVar)NEWLINE                                cross = '\N{THUMBS DOWN SIGN}'NEWLINE                                checkM = '\N{THUMBS UP SIGN}'NEWLINE                                await hmm.add_reaction(checkM)NEWLINE                                await hmm.add_reaction(cross)NEWLINE                    embedBreh = disnake.Embed(title='Sent',description='Your suggestion has been sent!')NEWLINE                    await inter.response.send_message(embed=embedBreh)NEWLINE                else:NEWLINE                    await inter.response.send_message("You have been banned from our system.")NEWLINE                    return 0NEWLINE    @commands.slash_command(name='approve', description='Approve a suggestion')NEWLINE    @commands.has_permissions(manage_messages=True)NEWLINE    async def _approve(self, inter,id):NEWLINE        id=int(id)NEWLINE        huh = await inter.channel.fetch_message(id)NEWLINE        member = huh.contentNEWLINE        member = int(member)NEWLINE        user = await inter.bot.fetch_user(member)NEWLINE        await inter.response.send_message('Suggestion approved!')NEWLINE        await huh.edit(content=f'{user.mention} Your suggestion has been approved!')NEWLINE    NEWLINENEWLINE    @commands.slash_command(name='decline', description='Decline a suggestion',)NEWLINE    @commands.has_permissions(manage_messages=True)NEWLINE    async def _decline(self, inter,id):NEWLINE        id=int(id)NEWLINE        huh = await inter.channel.fetch_message(id)NEWLINE        member = int(huh.content)NEWLINE        user = await inter.bot.fetch_user(member)NEWLINE        await inter.response.send_message('Suggestion declined!')NEWLINE        await huh.edit(content=f'{user.mention} Your suggestion has been declined!')NEWLINENEWLINE    NEWLINENEWLINE    @commands.slash_command(name='setup', description='Set the channel that suggestions will be sent to')NEWLINE    @commands.has_permissions(manage_channels=True)NEWLINE    async def _setup(self, inter,id=None):NEWLINE        if id is None:NEWLINE            with open('cogs/channel.txt','a') as f:NEWLINE                f.write('\n')NEWLINE                f.write(str(inter.channel.id))NEWLINE        else:NEWLINE            with open('cogs/channel.txt','a') as f:NEWLINE                f.write('\n')NEWLINE                f.write(id)NEWLINE        embedVar = disnake.Embed(title="Set up done!",color=0x85C4FF)NEWLINE        await inter.response.send_message(embed=embedVar)NEWLINENEWLINE    
@commands.slash_command(name='report',description='Report a suggestion')NEWLINE    async def _report(self, inter,messagelink):NEWLINE        re = await inter.bot.fetch_channel(883956344472895529)NEWLINE        await re.send(content=messagelink)NEWLINE        await inter.response.send_message(content='Sent')NEWLINENEWLINEdef setup(bot):NEWLINE    bot.add_cog(SlashCommands(bot)) |
from django.db import migrationsNEWLINENEWLINENEWLINEdef create_site(apps, schema_editor):NEWLINE Site = apps.get_model("sites", "Site")NEWLINE custom_domain = "app-1-33544.botics.co"NEWLINENEWLINE site_params = {NEWLINE "name": "App 1",NEWLINE }NEWLINE if custom_domain:NEWLINE site_params["domain"] = custom_domainNEWLINENEWLINE Site.objects.update_or_create(defaults=site_params, id=1)NEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ("sites", "0002_alter_domain_unique"),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.RunPython(create_site),NEWLINE ]NEWLINE |
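If this data migration ever needs to be reversible (e.g. migrating back past it), RunPython accepts a no-op backward step; a sketch of the alternative operations list:

# Sketch: make the data migration reversible with a no-op reverse function.
operations = [
    migrations.RunPython(create_site, migrations.RunPython.noop),
]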
import pytestNEWLINEfrom click.testing import CliRunnerNEWLINEfrom nest import trans_cliNEWLINENEWLINENEWLINEclass TestTranslate:NEWLINENEWLINE @pytest.fixture(autouse=True)NEWLINE def setup(self):NEWLINE self.runner = CliRunner()NEWLINENEWLINE def test_trans_cli(self):NEWLINE result = self.runner.invoke(trans_cli, ['hello'])NEWLINE assert result.exit_code == 0NEWLINE assert '你好' in result.outputNEWLINE NEWLINE def test_trans_cli_engine(self):NEWLINE result = self.runner.invoke(trans_cli, ['hello', '--engine=youdao'])NEWLINE assert 'youdao' in result.outputNEWLINENEWLINE def test_trans_cli_source_and_to(self):NEWLINE passNEWLINE |
from datetime import dateNEWLINENEWLINEimport database.db_actions_general as db_genNEWLINEfrom database.db_connect import CURSOR, DBNEWLINENEWLINENEWLINE############################NEWLINE# Add bookreadNEWLINE############################NEWLINEdef add_bookread_to_db(NEWLINE    bookread_id: str,NEWLINE    book_id: int,NEWLINE    start_date: date,NEWLINE    end_date: date,NEWLINE    days: int,NEWLINE    score: int = None,NEWLINE    comment: str = None,NEWLINE) -> None:NEWLINE    """Log information about a read book into the DB"""NEWLINENEWLINE    content = (book_id, start_date, end_date, days, score, comment, bookread_id)NEWLINE    addition_query = """NEWLINE        INSERT INTO Bookread (book_id, start_reading_date, end_reading_date,NEWLINE                              days_passed, out_of_ten_score, comment, bookread_pk)NEWLINE        VALUES (%s, %s, %s, %s, %s, %s, %s)NEWLINE    """NEWLINE    CURSOR.execute(addition_query, content)NEWLINE    DB.commit()NEWLINENEWLINENEWLINE############################NEWLINE# Delete bookreadNEWLINE############################NEWLINEdef remove_bookread_given_id(bookread_id: str) -> None:NEWLINE    """Remove a bookread record from the DB given its ID"""NEWLINE    db_gen.validate_input_type(bookread_id, str)NEWLINE    query = where_equal_bookreadid(bookread_id)NEWLINE    remove_bookread_general(query)NEWLINENEWLINENEWLINEdef remove_bookread_general(delete_condition_query: str) -> None:NEWLINE    """Remove bookread info from the DB given a general conditional query"""NEWLINE    db_gen.remove_general(CURSOR, DB, "Bookread", delete_condition_query)NEWLINENEWLINENEWLINE############################NEWLINE# Retrieve last bookreadNEWLINE############################NEWLINEdef get_last_bookread(fields: db_gen.FieldsInput = "All") -> tuple:NEWLINE    """Retrieve the last bookread info added to the database"""NEWLINE    fields = db_gen.parse_field_input(fields)NEWLINE    last_id = get_last_bookread_id()NEWLINE    query = (NEWLINE        f"SELECT {fields} FROM Bookread "NEWLINE        + where_equal_bookreadid(last_id)NEWLINE        + " LIMIT 0, 1"NEWLINE    )NEWLINE    CURSOR.execute(query)NEWLINE    return CURSOR.fetchone()NEWLINENEWLINENEWLINEdef get_last_bookread_id() -> str:NEWLINE    """Retrieve the ID of the last bookread added in the DB"""NEWLINE    return db_gen.get_last_id_general(CURSOR, "Bookread", "bookread_pk")NEWLINENEWLINENEWLINE############################NEWLINE# Conditions for WHERE statementsNEWLINE############################NEWLINEdef where_equal_bookreadid(bookread_id: str) -> str:NEWLINE    # NOTE: the ID is interpolated directly into SQL; only pass trusted, validated IDs.NEWLINE    return f"WHERE bookread_pk = '{bookread_id}'"NEWLINE |
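A hedged usage sketch of the helpers above; it assumes CURSOR/DB are already connected via database.db_connect and that a book with id 1 exists:

from datetime import date

# Log a finished book, then read the record back.
add_bookread_to_db(
    bookread_id="br-0001",   # hypothetical ID for illustration
    book_id=1,
    start_date=date(2022, 1, 1),
    end_date=date(2022, 1, 15),
    days=14,
    score=8,
    comment="Good pacing.",
)
print(get_last_bookread(fields="All"))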
import pendulumNEWLINEfrom dagster.core.scheduler.job import JobStatusNEWLINEfrom dagster_graphql.test.utils import (NEWLINE execute_dagster_graphql,NEWLINE infer_repository_selector,NEWLINE infer_schedule_selector,NEWLINE main_repo_location_name,NEWLINE main_repo_name,NEWLINE)NEWLINENEWLINEGET_SCHEDULES_QUERY = """NEWLINEquery SchedulesQuery($repositorySelector: RepositorySelector!) {NEWLINE schedulesOrError(repositorySelector: $repositorySelector) {NEWLINE __typenameNEWLINE ... on PythonError {NEWLINE messageNEWLINE stackNEWLINE }NEWLINE ... on Schedules {NEWLINE results {NEWLINE nameNEWLINE cronScheduleNEWLINE pipelineNameNEWLINE solidSelectionNEWLINE modeNEWLINE executionTimezoneNEWLINE }NEWLINE }NEWLINE }NEWLINE}NEWLINE"""NEWLINENEWLINEGET_SCHEDULE_QUERY = """NEWLINEquery getSchedule($scheduleSelector: ScheduleSelector!, $ticksAfter: Float) {NEWLINE scheduleOrError(scheduleSelector: $scheduleSelector) {NEWLINE __typenameNEWLINE ... on PythonError {NEWLINE messageNEWLINE stackNEWLINE }NEWLINE ... on Schedule {NEWLINE nameNEWLINE partitionSet {NEWLINE nameNEWLINE }NEWLINE executionTimezoneNEWLINE futureTicks(limit: 3, cursor: $ticksAfter) {NEWLINE results {NEWLINE timestampNEWLINE }NEWLINE cursorNEWLINE }NEWLINE }NEWLINE }NEWLINE}NEWLINE"""NEWLINENEWLINERECONCILE_SCHEDULER_STATE_QUERY = """NEWLINEmutation(NEWLINE $repositorySelector: RepositorySelector!NEWLINE) {NEWLINE reconcileSchedulerState(NEWLINE repositorySelector: $repositorySelector,NEWLINE ) {NEWLINE ... on PythonError {NEWLINE messageNEWLINE stackNEWLINE }NEWLINE ... on ReconcileSchedulerStateSuccess {NEWLINE messageNEWLINE }NEWLINE }NEWLINE}NEWLINE"""NEWLINENEWLINENEWLINESTART_SCHEDULES_QUERY = """NEWLINEmutation(NEWLINE $scheduleSelector: ScheduleSelector!NEWLINE) {NEWLINE startSchedule(NEWLINE scheduleSelector: $scheduleSelector,NEWLINE ) {NEWLINE ... on PythonError {NEWLINE messageNEWLINE classNameNEWLINE stackNEWLINE }NEWLINE ... on ScheduleStateResult {NEWLINE scheduleState {NEWLINE idNEWLINE statusNEWLINE }NEWLINE }NEWLINE }NEWLINE}NEWLINE"""NEWLINENEWLINENEWLINESTOP_SCHEDULES_QUERY = """NEWLINEmutation(NEWLINE $scheduleOriginId: String!NEWLINE) {NEWLINE stopRunningSchedule(NEWLINE scheduleOriginId: $scheduleOriginId,NEWLINE ) {NEWLINE ... on PythonError {NEWLINE messageNEWLINE classNameNEWLINE stackNEWLINE }NEWLINE ... 
on ScheduleStateResult {NEWLINE scheduleState {NEWLINE idNEWLINE statusNEWLINE }NEWLINE }NEWLINE }NEWLINE}NEWLINE"""NEWLINENEWLINENEWLINEdef default_execution_params():NEWLINE return {NEWLINE "runConfigData": {"intermediate_storage": {"filesystem": None}},NEWLINE "selector": {"name": "no_config_pipeline", "solidSelection": None},NEWLINE "mode": "default",NEWLINE }NEWLINENEWLINENEWLINEdef test_get_schedule_definitions_for_repository(graphql_context):NEWLINE selector = infer_repository_selector(graphql_context)NEWLINE result = execute_dagster_graphql(NEWLINE graphql_context, GET_SCHEDULES_QUERY, variables={"repositorySelector": selector},NEWLINE )NEWLINENEWLINE assert result.dataNEWLINE assert result.data["schedulesOrError"]NEWLINE assert result.data["schedulesOrError"]["__typename"] == "Schedules"NEWLINENEWLINE external_repository = graphql_context.get_repository_location(NEWLINE main_repo_location_name()NEWLINE ).get_repository(main_repo_name())NEWLINENEWLINE results = result.data["schedulesOrError"]["results"]NEWLINE assert len(results) == len(external_repository.get_external_schedules())NEWLINENEWLINE for schedule in results:NEWLINE if schedule["name"] == "timezone_schedule":NEWLINE assert schedule["executionTimezone"] == "US/Central"NEWLINENEWLINENEWLINEdef test_start_and_stop_schedule(graphql_context):NEWLINE external_repository = graphql_context.get_repository_location(NEWLINE main_repo_location_name()NEWLINE ).get_repository(main_repo_name())NEWLINE graphql_context.instance.reconcile_scheduler_state(external_repository)NEWLINENEWLINE schedule_selector = infer_schedule_selector(NEWLINE graphql_context, "no_config_pipeline_hourly_schedule"NEWLINE )NEWLINENEWLINE # Start a single scheduleNEWLINE start_result = execute_dagster_graphql(NEWLINE graphql_context, START_SCHEDULES_QUERY, variables={"scheduleSelector": schedule_selector},NEWLINE )NEWLINE assert start_result.data["startSchedule"]["scheduleState"]["status"] == JobStatus.RUNNING.valueNEWLINENEWLINE schedule_origin_id = start_result.data["startSchedule"]["scheduleState"]["id"]NEWLINENEWLINE # Stop a single scheduleNEWLINE stop_result = execute_dagster_graphql(NEWLINE graphql_context, STOP_SCHEDULES_QUERY, variables={"scheduleOriginId": schedule_origin_id},NEWLINE )NEWLINE assert (NEWLINE stop_result.data["stopRunningSchedule"]["scheduleState"]["status"]NEWLINE == JobStatus.STOPPED.valueNEWLINE )NEWLINENEWLINENEWLINEdef test_get_single_schedule_definition(graphql_context):NEWLINE context = graphql_contextNEWLINE instance = context.instanceNEWLINENEWLINE instance.reconcile_scheduler_state(NEWLINE external_repository=context.get_repository_location(NEWLINE main_repo_location_name()NEWLINE ).get_repository(main_repo_name()),NEWLINE )NEWLINENEWLINE schedule_selector = infer_schedule_selector(context, "partition_based_multi_mode_decorator")NEWLINENEWLINE result = execute_dagster_graphql(NEWLINE context, GET_SCHEDULE_QUERY, variables={"scheduleSelector": schedule_selector}NEWLINE )NEWLINENEWLINE assert result.dataNEWLINENEWLINE assert result.data["scheduleOrError"]["__typename"] == "Schedule"NEWLINE assert result.data["scheduleOrError"]["partitionSet"]NEWLINE assert result.data["scheduleOrError"]["executionTimezone"] == pendulum.now().timezone.nameNEWLINENEWLINE future_ticks = result.data["scheduleOrError"]["futureTicks"]NEWLINE assert future_ticksNEWLINE assert len(future_ticks["results"]) == 3NEWLINENEWLINE schedule_selector = infer_schedule_selector(context, "timezone_schedule")NEWLINENEWLINE future_ticks_start_time = 
pendulum.create(2019, 2, 27, tz="US/Central").timestamp()NEWLINENEWLINE result = execute_dagster_graphql(NEWLINE context,NEWLINE GET_SCHEDULE_QUERY,NEWLINE variables={"scheduleSelector": schedule_selector, "ticksAfter": future_ticks_start_time},NEWLINE )NEWLINENEWLINE assert result.dataNEWLINE assert result.data["scheduleOrError"]["__typename"] == "Schedule"NEWLINE assert result.data["scheduleOrError"]["executionTimezone"] == "US/Central"NEWLINENEWLINE future_ticks = result.data["scheduleOrError"]["futureTicks"]NEWLINE assert future_ticksNEWLINE assert len(future_ticks["results"]) == 3NEWLINE timestamps = [future_tick["timestamp"] for future_tick in future_ticks["results"]]NEWLINENEWLINE assert timestamps == [NEWLINE pendulum.create(2019, 2, 27, tz="US/Central").timestamp(),NEWLINE pendulum.create(2019, 2, 28, tz="US/Central").timestamp(),NEWLINE pendulum.create(2019, 3, 1, tz="US/Central").timestamp(),NEWLINE ]NEWLINENEWLINE cursor = future_ticks["cursor"]NEWLINENEWLINE assert future_ticks["cursor"] == (pendulum.create(2019, 3, 1, tz="US/Central").timestamp() + 1)NEWLINENEWLINE result = execute_dagster_graphql(NEWLINE context,NEWLINE GET_SCHEDULE_QUERY,NEWLINE variables={"scheduleSelector": schedule_selector, "ticksAfter": cursor},NEWLINE )NEWLINENEWLINE future_ticks = result.data["scheduleOrError"]["futureTicks"]NEWLINENEWLINE assert future_ticksNEWLINE assert len(future_ticks["results"]) == 3NEWLINE timestamps = [future_tick["timestamp"] for future_tick in future_ticks["results"]]NEWLINENEWLINE assert timestamps == [NEWLINE pendulum.create(2019, 3, 2, tz="US/Central").timestamp(),NEWLINE pendulum.create(2019, 3, 3, tz="US/Central").timestamp(),NEWLINE pendulum.create(2019, 3, 4, tz="US/Central").timestamp(),NEWLINE ]NEWLINE |
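# A minimal sketch of the futureTicks cursor arithmetic exercised by the schedule tests above. Assumes pendulum 1.x, where pendulum.create() exists (pendulum 2.x renamed it to pendulum.datetime()); the dates mirror the timezone_schedule test values.NEWLINEimport pendulumNEWLINENEWLINEticks = [pendulum.create(2019, 2, 27, tz="US/Central").timestamp(),NEWLINE         pendulum.create(2019, 2, 28, tz="US/Central").timestamp(),NEWLINE         pendulum.create(2019, 3, 1, tz="US/Central").timestamp()]NEWLINE# the cursor returned with a page of ticks points just past the last tick, soNEWLINE# passing it back as ticksAfter resumes with the next day's tickNEWLINEcursor = ticks[-1] + 1NEWLINEassert cursor == pendulum.create(2019, 3, 1, tz="US/Central").timestamp() + 1NEWLINE |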
import _plotly_utils.basevalidatorsNEWLINENEWLINENEWLINEclass LenValidator(_plotly_utils.basevalidators.NumberValidator):NEWLINE def __init__(NEWLINE self, plotly_name="len", parent_name="scatterpolargl.marker.colorbar", **kwargsNEWLINE ):NEWLINE super(LenValidator, self).__init__(NEWLINE plotly_name=plotly_name,NEWLINE parent_name=parent_name,NEWLINE edit_type=kwargs.pop("edit_type", "calc"),NEWLINE min=kwargs.pop("min", 0),NEWLINE **kwargsNEWLINE )NEWLINE |
#!/usr/bin/env python3NEWLINE# -*- coding: utf-8 -*-NEWLINENEWLINE# @author Copyright (c) 2022 Damir Dzeko AnticNEWLINE# @license MIT No AttributionNEWLINE# @version 0.1.2NEWLINE# @lastUpdate 2022-02-05NEWLINENEWLINE# ChangeLog:NEWLINE# - can be tested with: python3 -m unittest tomtomLookup.pyNEWLINE# - added object destructor to close the session/socketNEWLINENEWLINENEWLINEimport sysNEWLINEtry:NEWLINE    assert (sys.version_info.major == 3 and sys.version_info.minor >= 7), "Python version must be 3.7 or newer"NEWLINEexcept Exception as e:NEWLINE    print(e)NEWLINE    sys.exit(1)NEWLINENEWLINEimport timeNEWLINEfrom os import environNEWLINENEWLINEfrom datetime import timedeltaNEWLINENEWLINEfrom requests_cache import CachedSessionNEWLINEimport unittestNEWLINEimport jsonNEWLINENEWLINENEWLINETOMTOM_AUTH_KEY = environ.get("TOMTOM_AUTH_KEY")NEWLINENEWLINEdef tomtom_url(gps_od, gps_do):NEWLINE    def prefix():NEWLINE        return 'https://api.tomtom.com/routing/1/calculateRoute/'NEWLINE    def suffix():NEWLINE        return (f'/json?key={TOMTOM_AUTH_KEY}&routeRepresentation=summaryOnly&maxAlternatives=0' +NEWLINE                '&computeTravelTimeFor=none&routeType=fastest&traffic=false&travelMode=car')NEWLINE    return f'{prefix()}{",".join(gps_od)}:{",".join(gps_do)}{suffix()}'NEWLINENEWLINENEWLINEclass TomTomLookup():NEWLINENEWLINE    @staticmethodNEWLINE    def _make_throttle_hook(timeout=1.0):NEWLINE        """Make a request hook function that adds a custom delay for non-cached requests"""NEWLINE        def hook(response, *args, **kwargs):NEWLINE            if not getattr(response, 'from_cache', False):NEWLINE                # print('sleeping')NEWLINE                time.sleep(timeout)NEWLINE            return responseNEWLINE        return hookNEWLINENEWLINE    def __init__(self):NEWLINE        session = CachedSession('./requests_cache.db',NEWLINE                                backend='sqlite',NEWLINE                                timeout=30,NEWLINE                                expire_after=timedelta(days=30),NEWLINE                                old_data_on_error=True,NEWLINE                                serializer='json')NEWLINE        session.hooks['response'].append(TomTomLookup._make_throttle_hook(1.25))NEWLINE        self.session = sessionNEWLINENEWLINE    def getUrl(self, url):NEWLINE        response = self.session.get(url)NEWLINE        if response.status_code != 200:NEWLINE            raise Exception("TomTomLookup: GET call returned invalid response")NEWLINE        return response.textNEWLINENEWLINE    def getDistance_from_resp(self, response_text):NEWLINE        try:NEWLINE            json_obj = json.loads(response_text)NEWLINE            return json_obj['routes'][0]['summary']['lengthInMeters']NEWLINE        except (KeyError, IndexError, TypeError, ValueError):NEWLINE            raise Exception("TomTomLookup: Failed to decode REST API response")NEWLINENEWLINE    def getDistance_from_url(self, url):NEWLINE        response_text = self.getUrl(url)NEWLINE        return self.getDistance_from_resp(response_text)NEWLINENEWLINE    def __del__(self):NEWLINE        self.session.close()NEWLINENEWLINEclass TestTomTomLookup(unittest.TestCase):NEWLINE    def setUp(self):NEWLINE        self.tomtom = TomTomLookup()NEWLINENEWLINE    def test_one_url(self):NEWLINE        response_text = self.tomtom.getUrl('http://httpbin.org/delay/3')NEWLINE        response_obj = json.loads(response_text)NEWLINE        self.assertTrue(response_obj['url'] is not None)NEWLINENEWLINENEWLINEdef main():NEWLINE    print(f'{__file__} should not be run as stand-alone program')NEWLINE    return 2NEWLINENEWLINEif __name__ == '__main__':NEWLINE    sys.exit(main()) |
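# A usage sketch for the TomTomLookup class above. It assumes a valid TOMTOM_AUTH_KEY in the environment; the ("lat", "lon") coordinate pairs below are illustrative only. The first call hits the TomTom API, repeated calls are served from the requests_cache database.NEWLINEfrom tomtomLookup import TomTomLookup, tomtom_urlNEWLINENEWLINElookup = TomTomLookup()NEWLINEurl = tomtom_url(("45.8150", "15.9819"), ("43.5081", "16.4402"))NEWLINEprint(lookup.getDistance_from_url(url), "meters")NEWLINE |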
# coding: utf-8NEWLINENEWLINE"""NEWLINE KubernetesNEWLINENEWLINE No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)NEWLINENEWLINE OpenAPI spec version: v1.13.5NEWLINE NEWLINE Generated by: https://github.com/swagger-api/swagger-codegen.gitNEWLINE"""NEWLINENEWLINENEWLINEfrom __future__ import absolute_importNEWLINENEWLINEimport osNEWLINEimport sysNEWLINEimport unittestNEWLINENEWLINEimport kubernetes.clientNEWLINEfrom kubernetes.client.rest import ApiExceptionNEWLINEfrom kubernetes.client.models.v1_rolling_update_stateful_set_strategy import V1RollingUpdateStatefulSetStrategyNEWLINENEWLINENEWLINEclass TestV1RollingUpdateStatefulSetStrategy(unittest.TestCase):NEWLINE """ V1RollingUpdateStatefulSetStrategy unit test stubs """NEWLINENEWLINE def setUp(self):NEWLINE passNEWLINENEWLINE def tearDown(self):NEWLINE passNEWLINENEWLINE def testV1RollingUpdateStatefulSetStrategy(self):NEWLINE """NEWLINE Test V1RollingUpdateStatefulSetStrategyNEWLINE """NEWLINE # FIXME: construct object with mandatory attributes with example valuesNEWLINE #model = kubernetes.client.models.v1_rolling_update_stateful_set_strategy.V1RollingUpdateStatefulSetStrategy()NEWLINE passNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE |
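# A sketch for the FIXME in the test stub above: constructing the model with an example value. `partition` is the field this rolling-update strategy exposes in the apps/v1 API; the value 0 is illustrative.NEWLINEfrom kubernetes.client.models.v1_rolling_update_stateful_set_strategy import V1RollingUpdateStatefulSetStrategyNEWLINENEWLINEmodel = V1RollingUpdateStatefulSetStrategy(partition=0)NEWLINEassert model.partition == 0NEWLINE |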
#!/usr/bin/python3NEWLINE# -*- coding: utf-8 -*-NEWLINENEWLINE#NEWLINE# Copyright (C) 2017 Kévin MathieuNEWLINE#NEWLINE# This software may be modified and distributed under the termsNEWLINE# of the MIT license. See the LICENSE file for details.NEWLINE#NEWLINENEWLINEimport requestsNEWLINENEWLINENEWLINEclass URL:NEWLINE def __init__(self, url):NEWLINE self.baseUrl = urlNEWLINENEWLINE def call(self, params = None):NEWLINE req = requests.get(self.baseUrl, params=params)NEWLINENEWLINE return reqNEWLINE |
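# A minimal usage sketch for the URL wrapper above; the endpoint and params are illustrative, and the URL class is assumed to be in scope (imported from the module where it is defined).NEWLINEapi = URL('https://httpbin.org/get')NEWLINEresp = api.call(params={'q': 'test'})NEWLINEprint(resp.status_code, resp.url)NEWLINE |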
from sympy.sets import (ConditionSet, Intersection, FiniteSet,NEWLINE EmptySet, Union, Contains, imageset)NEWLINEfrom sympy import (Symbol, Eq, S, Abs, sin, asin, pi, Interval,NEWLINE And, Mod, oo, Function, Lambda)NEWLINEfrom sympy.testing.pytest import raises, XFAIL, warns_deprecated_sympyNEWLINENEWLINENEWLINEw = Symbol('w')NEWLINEx = Symbol('x')NEWLINEy = Symbol('y')NEWLINEz = Symbol('z')NEWLINEL = Symbol('lambda')NEWLINEf = Function('f')NEWLINENEWLINENEWLINEdef test_CondSet():NEWLINE sin_sols_principal = ConditionSet(x, Eq(sin(x), 0),NEWLINE Interval(0, 2*pi, False, True))NEWLINE assert pi in sin_sols_principalNEWLINE assert pi/2 not in sin_sols_principalNEWLINE assert 3*pi not in sin_sols_principalNEWLINE assert 5 in ConditionSet(x, x**2 > 4, S.Reals)NEWLINE assert 1 not in ConditionSet(x, x**2 > 4, S.Reals)NEWLINE # in this case, 0 is not part of the base set soNEWLINE # it can't be in any subset selected by the conditionNEWLINE assert 0 not in ConditionSet(x, y > 5, Interval(1, 7))NEWLINE # since 'in' requires a true/false, the following raisesNEWLINE # an error because the given value provides no informationNEWLINE # for the condition to evaluate (since the condition doesNEWLINE # not depend on the dummy symbol): the result is `y > 5`.NEWLINE # In this case, ConditionSet is just acting likeNEWLINE # Piecewise((Interval(1, 7), y > 5), (S.EmptySet, True)).NEWLINE raises(TypeError, lambda: 6 in ConditionSet(x, y > 5, Interval(1, 7)))NEWLINENEWLINE assert isinstance(ConditionSet(x, x < 1, {x, y}).base_set, FiniteSet)NEWLINE raises(TypeError, lambda: ConditionSet(x, x + 1, {x, y}))NEWLINE raises(TypeError, lambda: ConditionSet(x, x, 1))NEWLINENEWLINE I = S.IntegersNEWLINE C = ConditionSetNEWLINE assert C(x, x < 1, C(x, x < 2, I)NEWLINE ) == C(x, (x < 1) & (x < 2), I)NEWLINE assert C(y, y < 1, C(x, y < 2, I)NEWLINE ) == C(x, (x < 1) & (y < 2), I)NEWLINE assert C(y, y < 1, C(x, x < 2, I)NEWLINE ) == C(y, (y < 1) & (y < 2), I)NEWLINE assert C(y, y < 1, C(x, y < x, I)NEWLINE ) == C(x, (x < 1) & (y < x), I)NEWLINE assert C(y, x < 1, C(x, y < x, I)NEWLINE ) == C(L, (x < 1) & (y < L), I)NEWLINE c = C(y, x < 1, C(x, L < y, I))NEWLINE assert c == C(c.sym, (L < y) & (x < 1), I)NEWLINE assert c.sym not in (x, y, L)NEWLINE c = C(y, x < 1, C(x, y < x, FiniteSet(L)))NEWLINE assert c == C(L, And(x < 1, y < L), FiniteSet(L))NEWLINENEWLINENEWLINEdef test_CondSet_intersect():NEWLINE input_conditionset = ConditionSet(x, x**2 > 4, Interval(1, 4, False, False))NEWLINE other_domain = Interval(0, 3, False, False)NEWLINE output_conditionset = ConditionSet(x, x**2 > 4, Interval(1, 3, False, False))NEWLINE assert Intersection(input_conditionset, other_domain) == output_conditionsetNEWLINENEWLINENEWLINEdef test_issue_9849():NEWLINE assert ConditionSet(x, Eq(x, x), S.Naturals) == S.NaturalsNEWLINE assert ConditionSet(x, Eq(Abs(sin(x)), -1), S.Naturals) == S.EmptySetNEWLINENEWLINENEWLINEdef test_simplified_FiniteSet_in_CondSet():NEWLINE assert ConditionSet(x, And(x < 1, x > -3), FiniteSet(0, 1, 2)) == FiniteSet(0)NEWLINE assert ConditionSet(x, x < 0, FiniteSet(0, 1, 2)) == EmptySetNEWLINE assert ConditionSet(x, And(x < -3), EmptySet) == EmptySetNEWLINE y = Symbol('y')NEWLINE assert (ConditionSet(x, And(x > 0), FiniteSet(-1, 0, 1, y)) ==NEWLINE Union(FiniteSet(1), ConditionSet(x, And(x > 0), FiniteSet(y))))NEWLINE assert (ConditionSet(x, Eq(Mod(x, 3), 1), FiniteSet(1, 4, 2, y)) ==NEWLINE Union(FiniteSet(1, 4), ConditionSet(x, Eq(Mod(x, 3), 1), FiniteSet(y))))NEWLINENEWLINENEWLINEdef test_free_symbols():NEWLINE 
assert ConditionSet(x, Eq(y, 0), FiniteSet(z)NEWLINE ).free_symbols == {y, z}NEWLINE assert ConditionSet(x, Eq(x, 0), FiniteSet(z)NEWLINE ).free_symbols == {z}NEWLINE assert ConditionSet(x, Eq(x, 0), FiniteSet(x, z)NEWLINE ).free_symbols == {x, z}NEWLINENEWLINENEWLINEdef test_subs_CondSet():NEWLINE s = FiniteSet(z, y)NEWLINE c = ConditionSet(x, x < 2, s)NEWLINE # you can only replace sym with a symbol that is not inNEWLINE # the free symbolsNEWLINE assert c.subs(x, 1) == cNEWLINE assert c.subs(x, y) == ConditionSet(y, y < 2, s)NEWLINENEWLINE # double subs needed to change dummy if the base setNEWLINE # also contains the dummyNEWLINE orig = ConditionSet(y, y < 2, s)NEWLINE base = orig.subs(y, w)NEWLINE and_dummy = base.subs(y, w)NEWLINE assert base == ConditionSet(y, y < 2, {w, z})NEWLINE assert and_dummy == ConditionSet(w, w < 2, {w, z})NEWLINENEWLINE assert c.subs(x, w) == ConditionSet(w, w < 2, s)NEWLINE assert ConditionSet(x, x < y, sNEWLINE ).subs(y, w) == ConditionSet(x, x < w, s.subs(y, w))NEWLINE # if the user uses assumptions that cause the conditionNEWLINE # to evaluate, that can't be helped from SymPy's endNEWLINE n = Symbol('n', negative=True)NEWLINE assert ConditionSet(n, 0 < n, S.Integers) is S.EmptySetNEWLINE p = Symbol('p', positive=True)NEWLINE assert ConditionSet(n, n < y, S.IntegersNEWLINE ).subs(n, x) == ConditionSet(x, x < y, S.Integers)NEWLINE nc = Symbol('nc', commutative=False)NEWLINE raises(ValueError, lambda: ConditionSet(NEWLINE x, x < p, S.Integers).subs(x, nc))NEWLINE raises(ValueError, lambda: ConditionSet(NEWLINE x, x < p, S.Integers).subs(x, n))NEWLINE raises(ValueError, lambda: ConditionSet(NEWLINE x + 1, x < 1, S.Integers))NEWLINE raises(ValueError, lambda: ConditionSet(NEWLINE x + 1, x < 1, s))NEWLINE assert ConditionSet(NEWLINE n, n < x, Interval(0, oo)).subs(x, p) == Interval(0, oo)NEWLINE assert ConditionSet(NEWLINE n, n < x, Interval(-oo, 0)).subs(x, p) == Interval(-oo, 0)NEWLINENEWLINE assert ConditionSet(f(x), f(x) < 1, {w, z}NEWLINE ).subs(f(x), y) == ConditionSet(y, y < 1, {w, z})NEWLINENEWLINE # issue 17341NEWLINE k = Symbol('k')NEWLINE img1 = imageset(Lambda(k, 2*k*pi + asin(y)), S.Integers)NEWLINE img2 = imageset(Lambda(k, 2*k*pi + asin(S.One/3)), S.Integers)NEWLINE assert ConditionSet(x, Contains(NEWLINE y, Interval(-1,1)), img1).subs(y, S.One/3).dummy_eq(img2)NEWLINENEWLINENEWLINEdef test_subs_CondSet_tebr():NEWLINE with warns_deprecated_sympy():NEWLINE assert ConditionSet((x, y), {x + 1, x + y}, S.Reals) == \NEWLINE ConditionSet((x, y), Eq(x + 1, 0) & Eq(x + y, 0), S.Reals)NEWLINENEWLINE c = ConditionSet((x, y), Eq(x + 1, 0) & Eq(x + y, 0), S.Reals)NEWLINE assert c.subs(x, z) == cNEWLINENEWLINENEWLINEdef test_dummy_eq():NEWLINE C = ConditionSetNEWLINE I = S.IntegersNEWLINE c = C(x, x < 1, I)NEWLINE assert c.dummy_eq(C(y, y < 1, I))NEWLINE assert c.dummy_eq(1) == FalseNEWLINE assert c.dummy_eq(C(x, x < 1, S.Reals)) == FalseNEWLINE raises(ValueError, lambda: c.dummy_eq(C(x, x < 1, S.Reals), z))NEWLINENEWLINE c1 = ConditionSet((x, y), Eq(x + 1, 0) & Eq(x + y, 0), S.Reals)NEWLINE c2 = ConditionSet((x, y), Eq(x + 1, 0) & Eq(x + y, 0), S.Reals)NEWLINE c3 = ConditionSet((x, y), Eq(x + 1, 0) & Eq(x + y, 0), S.Complexes)NEWLINE assert c1.dummy_eq(c2)NEWLINE assert c1.dummy_eq(c3) is FalseNEWLINE assert c.dummy_eq(c1) is FalseNEWLINE assert c1.dummy_eq(c) is FalseNEWLINENEWLINENEWLINEdef test_contains():NEWLINE assert 6 in ConditionSet(x, x > 5, Interval(1, 7))NEWLINE assert (8 in ConditionSet(x, y > 5, Interval(1, 7))) is FalseNEWLINE # `in` should 
give True or False; in this case there is notNEWLINE # enough information for that resultNEWLINE raises(TypeError,NEWLINE lambda: 6 in ConditionSet(x, y > 5, Interval(1, 7)))NEWLINE assert ConditionSet(x, y > 5, Interval(1, 7)NEWLINE ).contains(6) == (y > 5)NEWLINE assert ConditionSet(x, y > 5, Interval(1, 7)NEWLINE ).contains(8) is S.falseNEWLINE assert ConditionSet(x, y > 5, Interval(1, 7)NEWLINE ).contains(w) == And(Contains(w, Interval(1, 7)), y > 5)NEWLINENEWLINE@XFAILNEWLINEdef test_failing_contains():NEWLINE # XXX This may have to return unevaluated Contains objectNEWLINE # because 1/0 should not be defined for 1 and 0 in the context ofNEWLINE # reals, but there is a nonsensical evaluation to ComplexInfinityNEWLINE # and the comparison is giving an error.NEWLINE assert ConditionSet(x, 1/x >= 0, S.Reals).contains(0) == \NEWLINE Contains(0, ConditionSet(x, 1/x >= 0, S.Reals), evaluate=False)NEWLINE |
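# A compact demonstration of the ConditionSet semantics exercised by the tests above: membership evaluates the condition over the base set, while a condition on a free symbol stays unevaluated under .contains().NEWLINEfrom sympy import ConditionSet, Interval, SymbolNEWLINENEWLINEx = Symbol('x')NEWLINEy = Symbol('y')NEWLINEs = ConditionSet(x, x**2 > 4, Interval(1, 4))NEWLINEassert 3 in s and 1 not in sNEWLINEassert ConditionSet(x, y > 5, Interval(1, 7)).contains(6) == (y > 5)NEWLINE |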
#!/usr/bin/env pythonNEWLINE# Jonas Schnelli, 2013NEWLINE# make sure the Lissomcoin-Qt.app contains the right plist (including the right version)NEWLINE# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)NEWLINENEWLINEfrom string import TemplateNEWLINEfrom datetime import dateNEWLINENEWLINEbitcoinDir = "./"NEWLINENEWLINEinFile = bitcoinDir+"/share/qt/Info.plist"NEWLINEoutFile = "Lissomcoin-Qt.app/Contents/Info.plist"NEWLINEversion = "unknown"NEWLINENEWLINEfileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"NEWLINEfor line in open(fileForGrabbingVersion):NEWLINE    lineArr = line.replace(" ", "").split("=")NEWLINE    if lineArr[0].startswith("VERSION"):NEWLINE        version = lineArr[1].replace("\n", "")NEWLINENEWLINEfIn = open(inFile, "r")NEWLINEfileContent = fIn.read()NEWLINEfIn.close()NEWLINENEWLINEs = Template(fileContent)NEWLINEnewFileContent = s.substitute(VERSION=version,YEAR=date.today().year)NEWLINENEWLINEfOut = open(outFile, "w")NEWLINEfOut.write(newFileContent)NEWLINEfOut.close()NEWLINENEWLINEprint("Info.plist freshly created")NEWLINE |
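# A minimal sketch of the string.Template substitution used in the plist script above; the VERSION and YEAR values are illustrative.NEWLINEfrom string import TemplateNEWLINEfrom datetime import dateNEWLINENEWLINEplist = Template("<string>$VERSION</string> <string>$YEAR</string>")NEWLINEprint(plist.substitute(VERSION="0.8.6", YEAR=date.today().year))NEWLINE |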
import taichi as tiNEWLINENEWLINEti.init()NEWLINENEWLINEn = 512NEWLINEx = ti.field(dtype=ti.f32, shape=(n, n))NEWLINENEWLINE@ti.kernelNEWLINEdef paint():NEWLINE    for i, j in ti.ndrange(n * 4, n * 4):NEWLINE        # 4x4 super sampling:NEWLINE        ret = ti.taichi_logo(ti.Vector([i, j]) / (n * 4))NEWLINE        x[i // 4, j // 4] += ret / 16NEWLINENEWLINENEWLINEdef main():NEWLINE    paint()NEWLINENEWLINE    gui = ti.GUI('Logo', (n, n))NEWLINE    while gui.running:NEWLINE        gui.set_image(x)NEWLINE        gui.show()NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    main()NEWLINE |
# vim: tabstop=4 shiftwidth=4 softtabstop=4NEWLINENEWLINE# Copyright 2011 OpenStack FoundationNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License"); you mayNEWLINE# not use this file except in compliance with the License. You may obtainNEWLINE# a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS, WITHOUTNEWLINE# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See theNEWLINE# License for the specific language governing permissions and limitationsNEWLINE# under the License.NEWLINE"""NEWLINEA module where we define some basic units for use across Cinder.NEWLINE"""NEWLINENEWLINEKiB = 1024NEWLINEMiB = KiB * 1024NEWLINEGiB = MiB * 1024NEWLINE |
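# Example use of the unit constants above, converting a raw byte count to MiB; the volume size is illustrative and KiB/MiB/GiB are assumed to be in scope (e.g. pasted below the definitions or imported from the units module).NEWLINEvolume_bytes = 5 * GiBNEWLINEprint(volume_bytes // MiB)  # -> 5120NEWLINE |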
#!/usr/bin/env pythonNEWLINENEWLINEfrom __future__ import print_functionNEWLINENEWLINEfrom builtins import rangeNEWLINEimport osNEWLINEimport reNEWLINEimport sysNEWLINEimport globNEWLINEimport jsonNEWLINEimport mathNEWLINEimport bisectNEWLINEimport randomNEWLINEimport signalNEWLINEif sys.version_info[0]>2:NEWLINE    import _pickle as cPickleNEWLINEelse:NEWLINE    import cPickleNEWLINEimport difflibNEWLINEimport argparseNEWLINEimport functoolsNEWLINEimport itertoolsNEWLINEimport subprocessNEWLINEimport collectionsNEWLINEimport multiprocessingNEWLINEimport FWCore.PythonUtilities.LumiList as LumiListNEWLINEimport Utilities.General.cmssw_das_client as cmssw_das_clientNEWLINEimport Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_toolsNEWLINENEWLINENEWLINE################################################################################NEWLINEdef main(argv = None):NEWLINE    """NEWLINE    Main routine. Not called if this module is loaded via `import`.NEWLINENEWLINE    Arguments:NEWLINE    - `argv`: Command line arguments passed to the script.NEWLINE    """NEWLINENEWLINE    if argv is None:NEWLINE        argv = sys.argv[1:]NEWLINENEWLINE    file_list_creator = FileListCreator(argv)NEWLINE    file_list_creator.create()NEWLINENEWLINENEWLINE################################################################################NEWLINEclass FileListCreator(object):NEWLINE    """Create file lists for alignment and validation for a given dataset.NEWLINE    """NEWLINENEWLINE    def __init__(self, argv):NEWLINE        """Constructor taking the command line arguments.NEWLINENEWLINE        Arguments:NEWLINE        - `argv`: command line argumentsNEWLINE        """NEWLINENEWLINE        self._first_dataset_ini = TrueNEWLINE        self._parser = self._define_parser()NEWLINE        self._args = self._parser.parse_args(argv)NEWLINENEWLINE        if not mps_tools.check_proxy():NEWLINE            print_msg(NEWLINE                "Please create proxy via 'voms-proxy-init -voms cms -rfc'.")NEWLINE            sys.exit(1)NEWLINENEWLINE        self._dataset_regex = re.compile(r"^/([^/]+)/([^/]+)/([^/]+)$")NEWLINE        self._validate_input()NEWLINENEWLINE        if self._args.test_mode:NEWLINE            import Configuration.PyReleaseValidation.relval_steps as rvsNEWLINE            import Configuration.PyReleaseValidation.relval_production as rvpNEWLINE            self._args.datasets = [rvs.steps[rvp.workflows[1000][1][0]]["INPUT"].dataSet]NEWLINE            self._validate_input() # ensure that this change is validNEWLINENEWLINE        self._datasets = sorted([datasetNEWLINE                                 for pattern in self._args.datasetsNEWLINE                                 for dataset in get_datasets(pattern)NEWLINE                                 if re.search(self._args.dataset_filter, dataset)])NEWLINE        if len(self._datasets) == 0:NEWLINE            print_msg("Found no dataset matching the pattern(s):")NEWLINE            for d in self._args.datasets: print_msg("\t"+d)NEWLINE            sys.exit(1)NEWLINENEWLINE        self._formatted_dataset = merge_strings(NEWLINE            [re.sub(self._dataset_regex, r"\1_\2_\3", dataset)NEWLINE             for dataset in self._datasets])NEWLINE        self._output_dir = os.path.join(self._args.output_dir,NEWLINE                                        self._formatted_dataset)NEWLINE        self._output_dir = os.path.abspath(self._output_dir)NEWLINE        self._cache = _DasCache(self._output_dir)NEWLINE        self._prepare_iov_datastructures()NEWLINE        self._prepare_run_datastructures()NEWLINENEWLINE        try:NEWLINE            os.makedirs(self._output_dir)NEWLINE        except OSError as e:NEWLINE            if e.args == (17, "File exists"):NEWLINE                if self._args.force:NEWLINE                    pass # do nothing, just clear the existing outputNEWLINE                elif self._args.use_cache:NEWLINE                    self._cache.load() # load cache before clearing the outputNEWLINE                else:NEWLINE                    print_msg("Directory '{}' already exists from previous runs"NEWLINE                              " of the script. 
Use '--use-cache' if you want to"NEWLINE                              " use the cached DAS-query results or use "NEWLINE                              "'--force' to remove it."NEWLINE                              .format(self._output_dir))NEWLINE                    sys.exit(1)NEWLINE                files = glob.glob(os.path.join(self._output_dir, "*"))NEWLINE                for f in files: os.remove(f)NEWLINE            else:NEWLINE                raiseNEWLINENEWLINENEWLINE    def create(self):NEWLINE        """Creates file list. To be called by user of the class."""NEWLINENEWLINE        self._request_dataset_information()NEWLINE        self._create_file_lists()NEWLINE        self._print_eventcounts()NEWLINE        self._write_file_lists()NEWLINENEWLINENEWLINE    _event_count_log = "event_count_info.log"NEWLINENEWLINENEWLINE    def _define_parser(self):NEWLINE        """Definition of command line argument parser."""NEWLINENEWLINE        parser = argparse.ArgumentParser(NEWLINE            description = "Create file lists for alignment",NEWLINE            epilog = ("The tool will create a directory containing all file "NEWLINE                      "lists and a log file with all relevant event counts "NEWLINE                      "('{}').".format(FileListCreator._event_count_log)))NEWLINE        parser.add_argument("-i", "--input", dest = "datasets", required = True,NEWLINE                            metavar = "DATASET", action = "append",NEWLINE                            help = ("CMS dataset name; supports wildcards; "NEWLINE                                    "use multiple times for multiple datasets"))NEWLINE        parser.add_argument("--dataset-filter", default = "",NEWLINE                            help = "regex to match within the datasets matched, "NEWLINE                                   "in case the wildcard isn't flexible enough")NEWLINE        parser.add_argument("-j", "--json", dest = "json", metavar = "PATH",NEWLINE                            help = "path to JSON file (optional)")NEWLINE        parser.add_argument("-f", "--fraction", dest = "fraction",NEWLINE                            type = float, default = 1,NEWLINE                            help = "max. fraction of files used for alignment")NEWLINE        parser.add_argument("--iov", dest = "iovs", metavar = "RUN", type = int,NEWLINE                            action = "append", default = [],NEWLINE                            help = ("define IOV by specifying first run; for "NEWLINE                                    "multiple IOVs use this option multiple "NEWLINE                                    "times; files from runs before the lowest "NEWLINE                                    "IOV are discarded (default: 1)"))NEWLINE        parser.add_argument("--miniiov", dest="miniiovs", metavar="RUN", type=int,NEWLINE                            action="append", default=[],NEWLINE                            help=("in addition to the standard IOVs, break up hippy jobs "NEWLINE                                  "at these points, so that jobs from before and after "NEWLINE                                  "these runs are not in the same job"))NEWLINE        parser.add_argument("-r", "--random", action = "store_true",NEWLINE                            default = False, help = "select files randomly")NEWLINE        parser.add_argument("-n", "--events-for-alignment", "--maxevents",NEWLINE                            dest = "events", type = int, metavar = "NUMBER",NEWLINE                            help = ("number of events needed for alignment; the"NEWLINE                                    " remaining events in the dataset are used "NEWLINE                                    "for validation; if n<=0, all events are "NEWLINE                                    "used for validation"))NEWLINE        parser.add_argument("--all-events", action = "store_true",NEWLINE                            help = "Use all events for alignment")NEWLINE        parser.add_argument("--tracks-for-alignment", dest = "tracks",NEWLINE                            type = int, metavar = "NUMBER",NEWLINE                            help = "number of tracks needed for alignment")NEWLINE        parser.add_argument("--track-rate", dest = "rate", type = float,NEWLINE                            metavar = "NUMBER",NEWLINE                            help = "number of tracks per event")NEWLINE        parser.add_argument("--run-by-run", dest = "run_by_run",NEWLINE                            action = "store_true", default = False,NEWLINE                            help = "create validation file list for each run")NEWLINE        parser.add_argument("--minimum-events-in-iov",NEWLINE                            dest = "minimum_events_in_iov", metavar = "NUMBER",NEWLINE                            type = int, default = 100000,NEWLINE                            help = ("minimum number of events for 
alignment per"NEWLINE " IOV; this option has a higher priority "NEWLINE "than '-f/--fraction' "NEWLINE "(default: %(default)s)"))NEWLINE parser.add_argument("--minimum-events-validation",NEWLINE dest = "minimum_events_validation",NEWLINE metavar = "NUMBER", type = int, default = 1,NEWLINE help = ("minimum number of events for validation; "NEWLINE "applies to IOVs; in case of --run-by-run "NEWLINE "it applies to runs runs "NEWLINE "(default: %(default)s)"))NEWLINE parser.add_argument("--use-cache", dest = "use_cache",NEWLINE action = "store_true", default = False,NEWLINE help = "use DAS-query results of previous run")NEWLINE parser.add_argument("-o", "--output-dir", dest = "output_dir",NEWLINE metavar = "PATH", default = os.getcwd(),NEWLINE help = "output base directory (default: %(default)s)")NEWLINE parser.add_argument("--create-ini", dest = "create_ini",NEWLINE action = "store_true", default = False,NEWLINE help = ("create dataset ini file based on the "NEWLINE "created file lists"))NEWLINE parser.add_argument("--force", action = "store_true", default = False,NEWLINE help = ("remove output directory from previous "NEWLINE "runs, if existing"))NEWLINE parser.add_argument("--hippy-events-per-job", type = int, default = 1,NEWLINE help = ("approximate number of events in each job for HipPy"))NEWLINE parser.add_argument("--test-mode", dest = "test_mode",NEWLINE action = "store_true", default = False,NEWLINE help = argparse.SUPPRESS) # hidden optionNEWLINE return parserNEWLINENEWLINENEWLINE def _validate_input(self):NEWLINE """Validate command line arguments."""NEWLINENEWLINE if self._args.events is None:NEWLINE if self._args.all_events:NEWLINE self._args.events = float("inf")NEWLINE print_msg("Using all tracks for alignment")NEWLINE elif (self._args.tracks is None) and (self._args.rate is None):NEWLINE msg = ("either -n/--events-for-alignment, --all-events, or both of "NEWLINE "--tracks-for-alignment and --track-rate are required")NEWLINE self._parser.error(msg)NEWLINE elif (((self._args.tracks is not None) and (self._args.rate is None)) orNEWLINE ((self._args.rate is not None)and (self._args.tracks is None))):NEWLINE msg = ("--tracks-for-alignment and --track-rate must be used "NEWLINE "together")NEWLINE self._parser.error(msg)NEWLINE else:NEWLINE self._args.events = int(math.ceil(self._args.tracks /NEWLINE self._args.rate))NEWLINE print_msg("Requested {0:d} tracks with {1:.2f} tracks/event "NEWLINE "-> {2:d} events for alignment."NEWLINE .format(self._args.tracks, self._args.rate,NEWLINE self._args.events))NEWLINE else:NEWLINE if (self._args.tracks is not None) or (self._args.rate is not None) or self._args.all_events:NEWLINE msg = ("-n/--events-for-alignment must not be used with "NEWLINE "--tracks-for-alignment, --track-rate, or --all-events")NEWLINE self._parser.error(msg)NEWLINE print_msg("Requested {0:d} events for alignment."NEWLINE .format(self._args.events))NEWLINENEWLINE for dataset in self._args.datasets:NEWLINE if not re.match(self._dataset_regex, dataset):NEWLINE print_msg("Dataset pattern '"+dataset+"' is not in CMS format.")NEWLINE sys.exit(1)NEWLINENEWLINE nonzero_events_per_iov = (self._args.minimum_events_in_iov > 0)NEWLINE if nonzero_events_per_iov and self._args.fraction <= 0:NEWLINE print_msg("Setting minimum number of events per IOV for alignment "NEWLINE "to 0 because a non-positive fraction of alignment events"NEWLINE " is chosen: {}".format(self._args.fraction))NEWLINE nonzero_events_per_iov = FalseNEWLINE self._args.minimum_events_in_iov = 0NEWLINE if 
nonzero_events_per_iov and self._args.events <= 0:NEWLINE            print_msg("Setting minimum number of events per IOV for alignment "NEWLINE                      "to 0 because a non-positive number of alignment events"NEWLINE                      " is chosen: {}".format(self._args.events))NEWLINE            nonzero_events_per_iov = FalseNEWLINE            self._args.minimum_events_in_iov = 0NEWLINENEWLINENEWLINE    def _prepare_iov_datastructures(self):NEWLINE        """Create the needed objects for IOV handling."""NEWLINENEWLINE        self._iovs = sorted(set(self._args.iovs))NEWLINE        if len(self._iovs) == 0: self._iovs.append(1)NEWLINE        self._iov_info_alignment = {iov: {"events": 0, "files": []}NEWLINE                                    for iov in self._iovs}NEWLINE        self._iov_info_validation = {iov: {"events": 0, "files": []}NEWLINE                                     for iov in self._iovs}NEWLINENEWLINE        self._miniiovs = sorted(set(self._iovs) | set(self._args.miniiovs))NEWLINENEWLINENEWLINE    def _get_iovs(self, runs, useminiiovs=False):NEWLINE        """NEWLINE        Return the IOV start for each run in `runs`; runs that are before anyNEWLINE        defined IOV are skipped.NEWLINENEWLINE        Arguments:NEWLINE        - `runs`: run numbersNEWLINE        """NEWLINENEWLINE        iovlist = self._miniiovs if useminiiovs else self._iovsNEWLINENEWLINE        iovs = []NEWLINE        for run in runs:NEWLINE            iov_index = bisect.bisect(iovlist, run)NEWLINE            if iov_index > 0: iovs.append(iovlist[iov_index-1])NEWLINE        return iovsNEWLINENEWLINENEWLINE    def _prepare_run_datastructures(self):NEWLINE        """Create the needed objects for run-by-run validation file lists."""NEWLINENEWLINE        self._run_info = {}NEWLINENEWLINENEWLINE    def _add_file_info(self, container, keys, fileinfo):NEWLINE        """Add the file described by `fileinfo` to `container` under each of `keys`.NEWLINENEWLINE        Arguments:NEWLINE        - `container`: dictionary holding information on files and event countsNEWLINE        - `keys`: keys to which the info should be added; will be created if notNEWLINE                  existingNEWLINE        - `fileinfo`: file information tuple of a dataset fileNEWLINE        """NEWLINENEWLINE        for key in keys:NEWLINE            if key not in container:NEWLINE                container[key] = {"events": 0,NEWLINE                                  "files": []}NEWLINE            container[key]["events"] += fileinfo.nevents / len(keys)NEWLINE            if fileinfo not in container[key]["files"]:NEWLINE                container[key]["files"].append(fileinfo)NEWLINENEWLINENEWLINE    def _remove_file_info(self, container, keys, fileinfo):NEWLINE        """Remove the file described by `fileinfo` from `container` for each of `keys`.NEWLINENEWLINE        Arguments:NEWLINE        - `container`: dictionary holding information on files and event countsNEWLINE        - `keys`: keys from which the info should be removedNEWLINE        - `fileinfo`: file information tuple of a dataset fileNEWLINE        """NEWLINENEWLINE        for key in keys:NEWLINE            if key not in container: continueNEWLINE            try:NEWLINE                index = container[key]["files"].index(fileinfo)NEWLINE            except ValueError: # file not foundNEWLINE                returnNEWLINE            del container[key]["files"][index]NEWLINE            container[key]["events"] -= fileinfo.nevents / len(keys)NEWLINENEWLINENEWLINE    def _request_dataset_information(self):NEWLINE        """Retrieve general dataset information and create file list."""NEWLINENEWLINE        if not self._cache.empty:NEWLINE            print_msg("Using cached information.")NEWLINE            (self._events_in_dataset,NEWLINE             self._files,NEWLINE             self._file_info,NEWLINE             self._max_run) = self._cache.get()NEWLINE            self.rereco = any(len(fileinfo.runs)>1 for fileinfo in self._file_info)NEWLINE            if self._args.random: random.shuffle(self._files)NEWLINE            returnNEWLINENEWLINE        # workaround to deal with KeyboardInterrupts in the worker processes:NEWLINE        # - ignore interrupt signals in workers (see initializer)NEWLINE        # - use a timeout in `get()` to avoid a bug in multiprocessingNEWLINE        number_of_processes = multiprocessing.cpu_count() - 1NEWLINE        number_of_processes = (number_of_processesNEWLINE                               if number_of_processes > 0NEWLINE                               else 1)NEWLINE        pool = multiprocessing.Pool(NEWLINE            processes = number_of_processes,NEWLINE            initializer = lambda: signal.signal(signal.SIGINT, signal.SIG_IGN))NEWLINENEWLINE        print_msg("Requesting information for the following dataset(s):")NEWLINE        for d in self._datasets: print_msg("\t"+d)NEWLINE        print_msg("This may take a while...")NEWLINENEWLINE        result = pool.map_async(get_events_per_dataset, self._datasets).get(3600)NEWLINE        self._events_in_dataset = sum(result)NEWLINENEWLINE        result = pool.map_async(get_max_run, self._datasets).get(3600)NEWLINE        self._max_run = max(result)NEWLINENEWLINE        result = sum(pool.map_async(get_file_info, self._datasets).get(3600), [])NEWLINE        files = pool.map_async(_make_file_info, result).get(3600)NEWLINE        self._file_info = sorted(fileinfo for fileinfo in files)NEWLINENEWLINE        self.rereco = any(len(fileinfo.runs)>1 for fileinfo in self._file_info)NEWLINENEWLINE        if self._args.test_mode:NEWLINE            self._file_info = self._file_info[-200:] # take only last chunk of filesNEWLINE        self._files = [fileinfo.name for fileinfo in self._file_info]NEWLINENEWLINE        # write information to cacheNEWLINE        self._cache.set(self._events_in_dataset, self._files, self._file_info,NEWLINE                        self._max_run)NEWLINE        self._cache.dump()NEWLINE        if self._args.random:NEWLINE            random.shuffle(self._file_info)NEWLINE            self._files = [fileinfo.name for fileinfo in self._file_info]NEWLINENEWLINE    def _create_file_lists(self):NEWLINE        """Create file lists for alignment and validation."""NEWLINENEWLINE        # collect files for alignment until minimal requirements are fulfilledNEWLINE        self._files_alignment = []NEWLINE        self._files_validation = []NEWLINE        self._events_for_alignment = 0NEWLINE        self._events_for_validation = 0NEWLINENEWLINE        max_range = (0NEWLINE                     if self._args.events <= 0NEWLINE                     else int(math.ceil(len(self._files)*self._args.fraction)))NEWLINE        use_for_alignment = TrueNEWLINE        for i, fileinfo in enumerate(self._file_info):NEWLINE            enough_events = self._events_for_alignment >= self._args.eventsNEWLINE            fraction_exceeded = i >= max_rangeNEWLINE            if enough_events or fraction_exceeded: use_for_alignment = FalseNEWLINENEWLINE            dataset, f, number_of_events, runs = fileinfoNEWLINENEWLINE            iovs = self._get_iovs(runs)NEWLINE            if use_for_alignment:NEWLINE                if iovs:NEWLINE                    self._events_for_alignment += number_of_eventsNEWLINE                    self._files_alignment.append(fileinfo)NEWLINE                    self._add_file_info(self._iov_info_alignment, iovs, fileinfo)NEWLINE                else:NEWLINE                    max_range += 1 # not used -> discard in fraction calculationNEWLINE            else:NEWLINE                if iovs:NEWLINE                    self._events_for_validation += number_of_eventsNEWLINE                    self._files_validation.append(fileinfo)NEWLINE                    self._add_file_info(self._iov_info_validation, iovs, fileinfo)NEWLINE                    if self._args.run_by_run:NEWLINE                        self._add_file_info(self._run_info, runs, fileinfo)NEWLINENEWLINE        self._fulfill_iov_eventcount()NEWLINENEWLINE        self._split_hippy_jobs()NEWLINENEWLINENEWLINE    def _fulfill_iov_eventcount(self):NEWLINE        """NEWLINE        Try to fulfill the requirement on the minimum number of events per IOVNEWLINE        in the alignment file list by picking files from the validation list.NEWLINE        """NEWLINENEWLINE        for iov in self._iovs:NEWLINE            if self._iov_info_alignment[iov]["events"] >= self._args.minimum_events_in_iov: continueNEWLINE            for fileinfo in self._files_validation[:]:NEWLINE                dataset, f, number_of_events, runs = 
fileinfoNEWLINE iovs = self._get_iovs(runs)NEWLINE if iov in iovs:NEWLINE self._files_alignment.append(fileinfo)NEWLINE self._events_for_alignment += number_of_eventsNEWLINE self._add_file_info(self._iov_info_alignment, iovs, fileinfo)NEWLINENEWLINE self._events_for_validation -= number_of_eventsNEWLINE self._remove_file_info(self._iov_info_validation, iovs, fileinfo)NEWLINE if self._args.run_by_run:NEWLINE self._remove_file_info(self._run_info, runs, fileinfo)NEWLINE self._files_validation.remove(fileinfo)NEWLINENEWLINE if (self._iov_info_alignment[iov]["events"]NEWLINE >= self._args.minimum_events_in_iov):NEWLINE break # break the file loop if already enough eventsNEWLINENEWLINE def _split_hippy_jobs(self):NEWLINE hippyjobs = {}NEWLINE for dataset, miniiov in itertools.product(self._datasets, self._miniiovs):NEWLINE jobsforminiiov = []NEWLINE hippyjobs[dataset,miniiov] = jobsforminiiovNEWLINE eventsinthisjob = float("inf")NEWLINE for fileinfo in self._files_alignment:NEWLINE if fileinfo.dataset != dataset: continueNEWLINE miniiovs = set(self._get_iovs(fileinfo.runs, useminiiovs=True))NEWLINE if miniiov not in miniiovs: continueNEWLINE if len(miniiovs) > 1:NEWLINE hippyjobs[dataset,miniiov] = []NEWLINE if eventsinthisjob >= self._args.hippy_events_per_job:NEWLINE currentjob = []NEWLINE jobsforminiiov.append(currentjob)NEWLINE eventsinthisjob = 0NEWLINE currentjob.append(fileinfo)NEWLINE currentjob.sort()NEWLINE eventsinthisjob += fileinfo.neventsNEWLINENEWLINE self._hippy_jobs = {NEWLINE (dataset, iov): sum((hippyjobs[dataset, miniiov]NEWLINE for miniiov in self._miniiovsNEWLINE if iov == max(_ for _ in self._iovs if _ <= miniiov)), []NEWLINE )NEWLINE for dataset, iov in itertools.product(self._datasets, self._iovs)NEWLINE }NEWLINENEWLINE def _print_eventcounts(self):NEWLINE """Print the event counts per file list and per IOV."""NEWLINENEWLINE log = os.path.join(self._output_dir, FileListCreator._event_count_log)NEWLINENEWLINE print_msg("Using {0:d} events for alignment ({1:.2f}%)."NEWLINE .format(self._events_for_alignment,NEWLINE 100.0*NEWLINE self._events_for_alignment/self._events_in_dataset),NEWLINE log_file = log)NEWLINE for iov in sorted(self._iov_info_alignment):NEWLINE print_msg(("Approximate events" if self.rereco else "Events") + " for alignment in IOV since {0:f}: {1:f}"NEWLINE .format(iov, self._iov_info_alignment[iov]["events"]),NEWLINE log_file = log)NEWLINENEWLINE print_msg("Using {0:d} events for validation ({1:.2f}%)."NEWLINE .format(self._events_for_validation,NEWLINE 100.0*NEWLINE self._events_for_validation/self._events_in_dataset),NEWLINE log_file = log)NEWLINENEWLINE for iov in sorted(self._iov_info_validation):NEWLINE msg = ("Approximate events" if self.rereco else "Events") + " for validation in IOV since {0:f}: {1:f}".format(NEWLINE iov, self._iov_info_validation[iov]["events"])NEWLINE if (self._iov_info_validation[iov]["events"]NEWLINE < self._args.minimum_events_validation):NEWLINE msg += " (not enough events -> no dataset file will be created)"NEWLINE print_msg(msg, log_file = log)NEWLINENEWLINE for run in sorted(self._run_info):NEWLINE msg = ("Approximate events" if self.rereco else "Events") + " for validation in run {0:f}: {1:f}".format(NEWLINE run, self._run_info[run]["events"])NEWLINE if (self._run_info[run]["events"]NEWLINE < self._args.minimum_events_validation):NEWLINE msg += " (not enough events -> no dataset file will be created)"NEWLINE print_msg(msg, log_file = log)NEWLINENEWLINE unused_events = (self._events_in_datasetNEWLINE - 
self._events_for_validationNEWLINE                         - self._events_for_alignment)NEWLINE        if unused_events > 0 and self._events_in_dataset != 0:NEWLINE            print_msg("Unused events: {0:d} ({1:.2f}%)"NEWLINE                      .format(unused_events,NEWLINE                              100.0*unused_events/self._events_in_dataset),NEWLINE                      log_file = log)NEWLINENEWLINENEWLINE    def _create_dataset_ini_section(self, name, collection, json_file = None):NEWLINE        """Write dataset ini snippet.NEWLINENEWLINE        Arguments:NEWLINE        - `name`: name of the dataset sectionNEWLINE        - `collection`: track collection of this datasetNEWLINE        - `json_file`: JSON file to be used for this dataset (optional)NEWLINE        """NEWLINENEWLINE        if json_file:NEWLINE            splitted = name.split("_since")NEWLINE            file_list = "_since".join(splitted[:-1]NEWLINE                                      if len(splitted) > 1NEWLINE                                      else splitted)NEWLINE        else:NEWLINE            file_list = nameNEWLINE        output = "[dataset:{}]\n".format(name)NEWLINE        output += "collection = {}\n".format(collection)NEWLINE        output += "inputFileList = ${{datasetdir}}/{}.txt\n".format(file_list)NEWLINE        output += "json = ${{datasetdir}}/{}\n".format(json_file) if json_file else ""NEWLINENEWLINE        if collection in ("ALCARECOTkAlCosmicsCTF0T",NEWLINE                          "ALCARECOTkAlCosmicsInCollisions"):NEWLINE            if self._first_dataset_ini:NEWLINE                print_msg("\tDetermined cosmics dataset, i.e. please replace "NEWLINE                          "'DUMMY_DECO_MODE_FLAG' and 'DUMMY_ZERO_TESLA_FLAG' "NEWLINE                          "with the correct values.")NEWLINE                self._first_dataset_ini = FalseNEWLINE            output += "cosmicsDecoMode = DUMMY_DECO_MODE_FLAG\n"NEWLINE            output += "cosmicsZeroTesla = DUMMY_ZERO_TESLA_FLAG\n"NEWLINE        output += "\n"NEWLINENEWLINE        return outputNEWLINENEWLINENEWLINE    def _create_json_file(self, name, first, last = None):NEWLINE        """NEWLINE        Create JSON file with `name` covering runs from `first` to `last`. If aNEWLINE        global JSON is provided, the resulting file is the intersection of theNEWLINE        file created here and the global one.NEWLINE        Returns the name of the created JSON file.NEWLINENEWLINE        Arguments:NEWLINE        - `name`: name of the created JSON fileNEWLINE        - `first`: first run covered by the JSON fileNEWLINE        - `last`: last run covered by the JSON fileNEWLINENEWLINE        """NEWLINENEWLINE        if last is None: last = self._max_runNEWLINE        name += "_JSON.txt"NEWLINE        print_msg("Creating JSON file: "+name)NEWLINENEWLINE        json_file = LumiList.LumiList(runs = range(first, last+1))NEWLINE        if self._args.json:NEWLINE            global_json = LumiList.LumiList(filename = self._args.json)NEWLINE            json_file = json_file & global_jsonNEWLINE        json_file.writeJSON(os.path.join(self._output_dir, name))NEWLINENEWLINE        return nameNEWLINENEWLINENEWLINE    def _get_track_collection(self, edm_file):NEWLINE        """Extract track collection from given `edm_file`.NEWLINENEWLINE        Arguments:NEWLINE        - `edm_file`: CMSSW dataset fileNEWLINE        """NEWLINENEWLINE        # use the global redirector to also allow files not yet at your site:NEWLINE        cmd = ["edmDumpEventContent", r"root://cms-xrd-global.cern.ch/"+edm_file]NEWLINE        try:NEWLINE            event_content = subprocess.check_output(cmd).decode().split("\n")NEWLINE        except subprocess.CalledProcessError as e:NEWLINE            splitted = edm_file.split("/")NEWLINE            try:NEWLINE                alcareco = splitted[splitted.index("ALCARECO")+1].split("-")[0]NEWLINE                alcareco = alcareco.replace("TkAlCosmics0T", "TkAlCosmicsCTF0T")NEWLINE                alcareco = "ALCARECO" + alcarecoNEWLINE                print_msg("\tDetermined track collection as '{}'.".format(alcareco))NEWLINE                return alcarecoNEWLINE            except ValueError:NEWLINE                if "RECO" in splitted:NEWLINE                    print_msg("\tDetermined track collection as 'generalTracks'.")NEWLINE                    return "generalTracks"NEWLINE                else:NEWLINE                    
print_msg("\tCould not determine track collection "NEWLINE "automatically.")NEWLINE print_msg("\tPlease replace 'DUMMY_TRACK_COLLECTION' with "NEWLINE "the correct value.")NEWLINE return "DUMMY_TRACK_COLLECTION"NEWLINENEWLINE track_collections = []NEWLINE for line in event_content:NEWLINE splitted = line.split()NEWLINE if len(splitted) > 0 and splitted[0] == r"vector<reco::Track>":NEWLINE track_collections.append(splitted[1].strip().strip('"'))NEWLINE if len(track_collections) == 0:NEWLINE print_msg("No track collection found in file '{}'.".format(edm_file))NEWLINE sys.exit(1)NEWLINE elif len(track_collections) == 1:NEWLINE print_msg("\tDetermined track collection as "NEWLINE "'{}'.".format(track_collections[0]))NEWLINE return track_collections[0]NEWLINE else:NEWLINE alcareco_tracks = filter(lambda x: x.startswith("ALCARECO"),NEWLINE track_collections)NEWLINE if len(alcareco_tracks) == 0 and "generalTracks" in track_collections:NEWLINE print_msg("\tDetermined track collection as 'generalTracks'.")NEWLINE return "generalTracks"NEWLINE elif len(alcareco_tracks) == 1:NEWLINE print_msg("\tDetermined track collection as "NEWLINE "'{}'.".format(alcareco_tracks[0]))NEWLINE return alcareco_tracks[0]NEWLINE print_msg("\tCould not unambiguously determine track collection in "NEWLINE "file '{}':".format(edm_file))NEWLINE print_msg("\tPlease replace 'DUMMY_TRACK_COLLECTION' with "NEWLINE "the correct value from the following list.")NEWLINE for collection in track_collections:NEWLINE print_msg("\t - "+collection)NEWLINE return "DUMMY_TRACK_COLLECTION"NEWLINENEWLINENEWLINE def _write_file_lists(self):NEWLINE """Write file lists to disk."""NEWLINENEWLINE self._create_dataset_txt(self._formatted_dataset, self._files_alignment)NEWLINE self._create_hippy_txt(self._formatted_dataset, sum(self._hippy_jobs.values(), []))NEWLINE self._create_dataset_cff(NEWLINE "_".join(["Alignment", self._formatted_dataset]),NEWLINE self._files_alignment)NEWLINENEWLINE self._create_dataset_cff(NEWLINE "_".join(["Validation", self._formatted_dataset]),NEWLINE self._files_validation)NEWLINENEWLINENEWLINE if self._args.create_ini:NEWLINE dataset_ini_general = "[general]\n"NEWLINE dataset_ini_general += "datasetdir = {}\n".format(self._output_dir)NEWLINE dataset_ini_general += ("json = {}\n\n".format(self._args.json)NEWLINE if self._args.jsonNEWLINE else "\n")NEWLINENEWLINE ini_path = self._formatted_dataset + ".ini"NEWLINE print_msg("Creating dataset ini file: " + ini_path)NEWLINE ini_path = os.path.join(self._output_dir, ini_path)NEWLINENEWLINE collection = self._get_track_collection(self._files[0])NEWLINENEWLINE with open(ini_path, "w") as f:NEWLINE f.write(dataset_ini_general)NEWLINE f.write(self._create_dataset_ini_section(NEWLINE self._formatted_dataset, collection))NEWLINENEWLINE iov_wise_ini = dataset_ini_generalNEWLINENEWLINE for i,iov in enumerate(sorted(self._iovs)):NEWLINE iov_str = "since{0:d}".format(iov)NEWLINE iov_str = "_".join([self._formatted_dataset, iov_str])NEWLINENEWLINE if self.rereco:NEWLINE if i == len(self._iovs) - 1:NEWLINE last = NoneNEWLINE else:NEWLINE last = sorted(self._iovs)[i+1] - 1NEWLINE local_json = self._create_json_file(iov_str, iov, last)NEWLINE else:NEWLINE local_json = NoneNEWLINENEWLINE if self._args.create_ini:NEWLINE iov_wise_ini += self._create_dataset_ini_section(iov_str,NEWLINE collection,NEWLINE local_json)NEWLINENEWLINE self._create_dataset_txt(iov_str,NEWLINE self._iov_info_alignment[iov]["files"])NEWLINE self._create_hippy_txt(iov_str, sum((self._hippy_jobs[dataset,iov] for 
dataset in self._datasets), []))NEWLINE            self._create_dataset_cff(NEWLINE                "_".join(["Alignment", iov_str]),NEWLINE                self._iov_info_alignment[iov]["files"],NEWLINE                json_file=local_json)NEWLINENEWLINE            if (self._iov_info_validation[iov]["events"]NEWLINE                < self._args.minimum_events_validation):NEWLINE                continueNEWLINE            self._create_dataset_cff(NEWLINE                "_".join(["Validation", iov_str]),NEWLINE                self._iov_info_validation[iov]["files"],NEWLINE                json_file=local_json)NEWLINENEWLINE        if self._args.create_ini and iov_wise_ini != dataset_ini_general:NEWLINE            ini_path = self._formatted_dataset + "_IOVs.ini"NEWLINE            print_msg("Creating dataset ini file: " + ini_path)NEWLINE            ini_path = os.path.join(self._output_dir, ini_path)NEWLINE            with open(ini_path, "w") as f: f.write(iov_wise_ini)NEWLINENEWLINE        for run in sorted(self._run_info):NEWLINE            if self.rereco: continue # need to implement more JSON filesNEWLINE            if (self._run_info[run]["events"]NEWLINE                < self._args.minimum_events_validation):NEWLINE                continueNEWLINE            self._create_dataset_cff(NEWLINE                "_".join(["Validation", self._formatted_dataset, str(run)]),NEWLINE                self._run_info[run]["files"])NEWLINENEWLINENEWLINE    def _create_dataset_txt(self, name, file_list):NEWLINE        """Write alignment file list to disk.NEWLINENEWLINE        Arguments:NEWLINE        - `name`: name of the file listNEWLINE        - `file_list`: list of files to write to `name`NEWLINE        """NEWLINENEWLINE        name += ".txt"NEWLINE        print_msg("Creating dataset file list: "+name)NEWLINE        with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE            f.write("\n".join(fileinfo.name for fileinfo in file_list))NEWLINENEWLINENEWLINE    def _create_hippy_txt(self, name, job_list):NEWLINE        name += "_hippy.txt"NEWLINE        print_msg("Creating dataset file list for HipPy: "+name)NEWLINE        with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE            f.write("\n".join(",".join("'"+fileinfo.name+"'" for fileinfo in job) for job in job_list)+"\n")NEWLINENEWLINENEWLINE    def _create_dataset_cff(self, name, file_list, json_file = None):NEWLINE        """NEWLINE        Create configuration fragment to define a dataset.NEWLINENEWLINE        Arguments:NEWLINE        - `name`: name of the configuration fragmentNEWLINE        - `file_list`: list of files to write to `name`NEWLINE        - `json_file`: JSON file to be used for this dataset (optional)NEWLINE        """NEWLINENEWLINE        if json_file is None: json_file = self._args.json # might still be NoneNEWLINE        if json_file is not None:NEWLINE            json_file = os.path.join(self._output_dir, json_file)NEWLINENEWLINE        name = "_".join(["Dataset", name, "cff.py"])NEWLINE        print_msg("Creating dataset configuration fragment: "+name)NEWLINENEWLINE        file_list_str = ""NEWLINE        for sub_list in get_chunks(file_list, 255):NEWLINE            file_list_str += ("readFiles.extend([\n'"+NEWLINE                              "',\n'".join(fileinfo.name for fileinfo in sub_list)+NEWLINE                              "'\n])\n")NEWLINENEWLINE        fragment = FileListCreator._dataset_template.format(NEWLINE            lumi_def = ("import FWCore.PythonUtilities.LumiList as LumiList\n\n"NEWLINE                        "lumiSecs = cms.untracked.VLuminosityBlockRange()\n"NEWLINE                        "goodLumiSecs = LumiList.LumiList(filename = "NEWLINE                        "'{0:s}').getCMSSWString().split(',')"NEWLINE                        .format(json_file)NEWLINE                        if json_file else ""),NEWLINE            lumi_arg = ("lumisToProcess = lumiSecs,\n                    "NEWLINE                        if json_file else ""),NEWLINE            lumi_extend = "lumiSecs.extend(goodLumiSecs)" if json_file else "",NEWLINE            files = file_list_str)NEWLINENEWLINE        with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE            f.write(fragment)NEWLINENEWLINENEWLINE    _dataset_template = """\NEWLINEimport FWCore.ParameterSet.Config as 
cmsNEWLINE{lumi_def:s}NEWLINEreadFiles = cms.untracked.vstring()NEWLINEsource = cms.Source("PoolSource",NEWLINE {lumi_arg:s}fileNames = readFiles)NEWLINE{files:s}{lumi_extend:s}NEWLINEmaxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))NEWLINE"""NEWLINENEWLINENEWLINEclass _DasCache(object):NEWLINE """Helper class to cache information from DAS requests."""NEWLINENEWLINE def __init__(self, file_list_id):NEWLINE """Constructor of the cache.NEWLINENEWLINE Arguments:NEWLINE - `file_list_id`: ID of the cached file listsNEWLINE """NEWLINENEWLINE self._file_list_id = file_list_idNEWLINE self._cache_file_name = os.path.join(file_list_id, ".das_cache.pkl")NEWLINE self.reset()NEWLINENEWLINENEWLINE def reset(self):NEWLINE """Reset the cache contents and the 'empty' flag."""NEWLINENEWLINE self._empty = TrueNEWLINE self._events_in_dataset = 0NEWLINE self._files = []NEWLINE self._file_info = []NEWLINE self._max_run = NoneNEWLINENEWLINENEWLINE def set(self, total_events, file_list, file_info, max_run):NEWLINE """Set the content of the cache.NEWLINENEWLINE Arguments:NEWLINE - `total_events`: total number of events in datasetNEWLINE - `file_list`: list of files in datasetNEWLINE - `file_info`: dictionary with numbers of events per fileNEWLINE - `max_run`: highest run number contained in the datasetNEWLINE """NEWLINENEWLINE self._events_in_dataset = total_eventsNEWLINE self._files = file_listNEWLINE self._file_info = file_infoNEWLINE self._max_run = max_runNEWLINE self._empty = FalseNEWLINENEWLINENEWLINE def get(self):NEWLINE """NEWLINE Get the content of the cache as tuple:NEWLINE result = (total number of events in dataset,NEWLINE list of files in dataset,NEWLINE dictionary with numbers of events and runs per file)NEWLINE """NEWLINENEWLINE return self._events_in_dataset, self._files, self._file_info, self._max_runNEWLINENEWLINENEWLINE def load(self):NEWLINE """Loads the cached contents."""NEWLINENEWLINE if not self.empty:NEWLINE print_msg("Overriding file information with cached information.")NEWLINE try:NEWLINE with open(self._cache_file_name, "rb") as f:NEWLINE tmp_dict = cPickle.load(f)NEWLINE self.__dict__.update(tmp_dict)NEWLINE except IOError as e:NEWLINE if e.args == (2, "No such file or directory"):NEWLINE msg = "Failed to load cache for '{}'.".format(self._file_list_id)NEWLINE if not self.empty:NEWLINE msg += " Keeping the previous file information."NEWLINE print_msg(msg)NEWLINE else:NEWLINE raiseNEWLINENEWLINENEWLINE def dump(self):NEWLINE """Dumps the contents to the cache file."""NEWLINENEWLINE if self.empty:NEWLINE print_msg("Cache is empty. 
Not writing to file.")NEWLINE returnNEWLINENEWLINE with open(self._cache_file_name, "wb") as f:NEWLINE cPickle.dump(self.__dict__, f, 2)NEWLINENEWLINENEWLINE @propertyNEWLINE def empty(self):NEWLINE """NEWLINE Flag indicating whether the cache is empty or has been filled (possiblyNEWLINE with nothing).NEWLINE """NEWLINENEWLINE return self._emptyNEWLINENEWLINENEWLINENEWLINE################################################################################NEWLINEdef das_client(query, check_key = None):NEWLINE """NEWLINE Submit `query` to DAS client and handle possible errors.NEWLINE Further treatment of the output might be necessary.NEWLINENEWLINE Arguments:NEWLINE - `query`: DAS queryNEWLINE - `check_key`: optional key to be checked for; retriggers query if neededNEWLINE """NEWLINENEWLINE error = TrueNEWLINE for i in range(5): # maximum of 5 triesNEWLINE try:NEWLINE das_data = cmssw_das_client.get_data(query, limit = 0)NEWLINE except IOError as e:NEWLINE if e.errno == 14: #https://stackoverflow.com/q/36397853/5228524NEWLINE continueNEWLINE except ValueError as e:NEWLINE if str(e) == "No JSON object could be decoded":NEWLINE continueNEWLINENEWLINE if das_data["status"] == "ok":NEWLINE if das_data["nresults"] == 0 or check_key is None:NEWLINE error = FalseNEWLINE breakNEWLINENEWLINE result_count = 0NEWLINE for d in find_key(das_data["data"], [check_key]):NEWLINE result_count += len(d)NEWLINE if result_count == 0:NEWLINE das_data["status"] = "error"NEWLINE das_data["reason"] = ("DAS did not return required data.")NEWLINE continueNEWLINE else:NEWLINE error = FalseNEWLINE breakNEWLINENEWLINE if das_data["status"] == "error":NEWLINE print_msg("DAS query '{}' failed 5 times. "NEWLINE "The last time for the the following reason:".format(query))NEWLINE print(das_data["reason"])NEWLINE sys.exit(1)NEWLINE return das_data["data"]NEWLINENEWLINENEWLINEdef find_key(collection, key_chain):NEWLINE """Searches for `key` in `collection` and returns first corresponding value.NEWLINENEWLINE Arguments:NEWLINE - `collection`: list of dictionariesNEWLINE - `key_chain`: chain of keys to be searched forNEWLINE """NEWLINENEWLINE result = NoneNEWLINE for i,key in enumerate(key_chain):NEWLINE for item in collection:NEWLINE if key in item:NEWLINE if i == len(key_chain) - 1:NEWLINE result = item[key]NEWLINE else:NEWLINE try:NEWLINE result = find_key(item[key], key_chain[i+1:])NEWLINE except LookupError:NEWLINE pass # continue with next `item` in `collection`NEWLINE else:NEWLINE pass # continue with next `item` in `collection`NEWLINENEWLINE if result is not None: return resultNEWLINE raise LookupError(key_chain, collection) # putNEWLINENEWLINENEWLINEdef print_msg(text, line_break = True, log_file = None):NEWLINE """Formatted printing of `text`.NEWLINENEWLINE Arguments:NEWLINE - `text`: string to be printedNEWLINE """NEWLINENEWLINE msg = " >>> " + str(text)NEWLINE if line_break:NEWLINE print(msg)NEWLINE else:NEWLINE print(msg, end=' ')NEWLINE sys.stdout.flush()NEWLINE if log_file:NEWLINE with open(log_file, "a") as f: f.write(msg+"\n")NEWLINE return msgNEWLINENEWLINENEWLINEdef get_runs(file_name):NEWLINE """NEWLINE Try to guess the run number from `file_name`. 
If run could not beNEWLINE    determined, gets the run numbers from DAS (slow!)NEWLINENEWLINE    Arguments:NEWLINE    - `file_name`: name of the considered fileNEWLINE    """NEWLINE    try:NEWLINE        return [int("".join(file_name.split("/")[-4:-2]))]NEWLINE    except ValueError:NEWLINE        query = "run file="+file_name+" system=dbs3"NEWLINE        return [int(_) for _ in find_key(das_client(query), ["run", "run_number"])]NEWLINENEWLINENEWLINEdef get_max_run(dataset_name):NEWLINE    """Retrieve the maximum run number in `dataset_name`.NEWLINENEWLINE    Arguments:NEWLINE    - `dataset_name`: name of the datasetNEWLINE    """NEWLINENEWLINE    data = das_client("run dataset={0:s} system=dbs3".format(dataset_name))NEWLINE    runs = [f["run"][0]["run_number"] for f in data]NEWLINE    return max(runs)NEWLINENEWLINENEWLINEdef get_files(dataset_name):NEWLINE    """Retrieve list of files in `dataset_name`.NEWLINENEWLINE    Arguments:NEWLINE    - `dataset_name`: name of the datasetNEWLINE    """NEWLINENEWLINE    data = das_client(("file dataset={0:s} system=dbs3 detail=True | "+NEWLINE                       "grep file.name, file.nevents > 0").format(dataset_name),NEWLINE                      "file")NEWLINE    return [find_key(f["file"], ["name"]) for f in data]NEWLINENEWLINENEWLINEdef get_datasets(dataset_pattern):NEWLINE    """Retrieve list of datasets matching `dataset_pattern`.NEWLINENEWLINE    Arguments:NEWLINE    - `dataset_pattern`: pattern of dataset namesNEWLINE    """NEWLINENEWLINE    data = das_client("dataset dataset={0:s} system=dbs3 detail=True"NEWLINE                      "| grep dataset.name".format(dataset_pattern), "dataset")NEWLINE    return sorted(set([find_key(f["dataset"], ["name"]) for f in data]))NEWLINENEWLINENEWLINEdef get_events_per_dataset(dataset_name):NEWLINE    """Retrieve the number of events in `dataset_name`.NEWLINENEWLINE    Arguments:NEWLINE    - `dataset_name`: name of a datasetNEWLINE    """NEWLINENEWLINE    return _get_events("dataset", dataset_name)NEWLINENEWLINENEWLINEdef get_events_per_file(file_name):NEWLINE    """Retrieve the number of events in `file_name`.NEWLINENEWLINE    Arguments:NEWLINE    - `file_name`: name of a dataset fileNEWLINE    """NEWLINENEWLINE    return _get_events("file", file_name)NEWLINENEWLINENEWLINEdef _get_events(entity, name):NEWLINE    """Retrieve the number of events from `entity` called `name`.NEWLINENEWLINE    Arguments:NEWLINE    - `entity`: type of entityNEWLINE    - `name`: name of entityNEWLINE    """NEWLINENEWLINE    data = das_client("{0:s}={1:s} system=dbs3 detail=True | grep {0:s}.nevents"NEWLINE                      .format(entity, name), entity)NEWLINE    return int(find_key(data, [entity, "nevents"]))NEWLINENEWLINENEWLINEdef _get_properties(name, entity, properties, filters = None, sub_entity = None,NEWLINE                    aggregators = None):NEWLINE    """Retrieve `properties` from `entity` called `name`.NEWLINENEWLINE    Arguments:NEWLINE    - `name`: name of entityNEWLINE    - `entity`: type of entityNEWLINE    - `properties`: list of property namesNEWLINE    - `filters`: list of filters on propertiesNEWLINE    - `sub_entity`: type of entity from which to extract the properties;NEWLINE                    defaults to `entity`NEWLINE    - `aggregators`: additional aggregators/filters to amend to queryNEWLINE    """NEWLINENEWLINE    if sub_entity is None: sub_entity = entityNEWLINE    if filters is None: filters = []NEWLINE    props = ["{0:s}.{1:s}".format(sub_entity,prop.split()[0])NEWLINE             for prop in properties]NEWLINE    conditions = ["{0:s}.{1:s}".format(sub_entity, filt)NEWLINE                  for filt in filters]NEWLINE    add_ons = "" if aggregators is None else " | "+" | ".join(aggregators)NEWLINENEWLINE    data = das_client("{0:s} {1:s}={2:s} system=dbs3 detail=True | grep {3:s}{4:s}"NEWLINE                      .format(sub_entity, entity, 
def _get_properties(name, entity, properties, filters = None, sub_entity = None,NEWLINE                    aggregators = None):NEWLINE    """Retrieve `properties` from `entity` called `name`.NEWLINENEWLINE    Arguments:NEWLINE    - `name`: name of entityNEWLINE    - `entity`: type of entityNEWLINE    - `properties`: list of property namesNEWLINE    - `filters`: list of filters on propertiesNEWLINE    - `sub_entity`: type of entity from which to extract the properties;NEWLINE                    defaults to `entity`NEWLINE    - `aggregators`: additional aggregators/filters to append to the queryNEWLINE    """NEWLINENEWLINE    if sub_entity is None: sub_entity = entityNEWLINE    if filters is None: filters = []NEWLINE    props = ["{0:s}.{1:s}".format(sub_entity,prop.split()[0])NEWLINE             for prop in properties]NEWLINE    conditions = ["{0:s}.{1:s}".format(sub_entity, filt)NEWLINE                  for filt in filters]NEWLINE    add_ons = "" if aggregators is None else " | "+" | ".join(aggregators)NEWLINENEWLINE    data = das_client("{0:s} {1:s}={2:s} system=dbs3 detail=True | grep {3:s}{4:s}"NEWLINE                      .format(sub_entity, entity, name,NEWLINE                              ", ".join(props+conditions), add_ons), sub_entity)NEWLINE    return [[find_key(f[sub_entity], [prop]) for prop in properties] for f in data]NEWLINENEWLINENEWLINEdef get_file_info(dataset):NEWLINE    result = _get_properties(name=dataset,NEWLINE                             properties = ["name", "nevents"],NEWLINE                             filters = ["nevents > 0"],NEWLINE                             entity = "dataset",NEWLINE                             sub_entity = "file")NEWLINE    return [(dataset, name, nevents) for name, nevents in result]NEWLINENEWLINENEWLINEFileInfo = collections.namedtuple("FileInfo", "dataset name nevents runs")NEWLINENEWLINEdef _make_file_info(dataset_name_nevents):NEWLINE    return FileInfo(*dataset_name_nevents, runs=get_runs(dataset_name_nevents[1]))NEWLINENEWLINENEWLINEdef get_chunks(long_list, chunk_size):NEWLINE    """NEWLINE    Generates sub-lists of `long_list` with a maximum size ofNEWLINE    `chunk_size`.NEWLINENEWLINE    Arguments:NEWLINE    - `long_list`: original listNEWLINE    - `chunk_size`: maximum size of created sub-listsNEWLINE    """NEWLINENEWLINE    for i in range(0, len(long_list), chunk_size):NEWLINE        yield long_list[i:i+chunk_size]NEWLINENEWLINENEWLINEdef merge_strings(strings):NEWLINE    """Merge strings in `strings` into a common string.NEWLINENEWLINE    Arguments:NEWLINE    - `strings`: list of stringsNEWLINE    """NEWLINENEWLINE    if type(strings) == str:NEWLINE        return stringsNEWLINE    elif len(strings) == 0:NEWLINE        return ""NEWLINE    elif len(strings) == 1:NEWLINE        return strings[0]NEWLINE    elif len(strings) == 2:NEWLINE        first = strings[0]NEWLINE        second = strings[1]NEWLINE    else:NEWLINE        first = merge_strings(strings[:-1])NEWLINE        second = strings[-1]NEWLINENEWLINE    merged_string = ""NEWLINE    blocks = difflib.SequenceMatcher(None, first, second).get_matching_blocks()NEWLINENEWLINE    last_i, last_j, last_n = 0, 0, 0NEWLINE    for i, j, n in blocks:NEWLINE        merged_string += first[last_i+last_n:i]NEWLINE        merged_string += second[last_j+last_n:j]NEWLINE        merged_string += first[i:i+n]NEWLINE        last_i, last_j, last_n = i, j, nNEWLINENEWLINE    return str(merged_string)NEWLINENEWLINENEWLINE################################################################################NEWLINEif __name__ == "__main__":NEWLINE    try:NEWLINE        main()NEWLINE    except KeyboardInterrupt:NEWLINE        passNEWLINE |
genero = ''NEWLINEwhile genero != 'F' and genero != 'M':NEWLINE    genero = str(input('Gender [M/F]: ')).upper()NEWLINE    if genero == 'F':NEWLINE        print('Your gender is FEMALE!')NEWLINE    elif genero == 'M':NEWLINE        print('Your gender is MALE!')NEWLINEprint('END') |
#!/usr/bin/env pythonNEWLINE# -*- coding: utf-8 -*-NEWLINEimport jsonNEWLINENEWLINEfrom alipay.aop.api.constant.ParamConstants import *NEWLINENEWLINENEWLINEclass FengdieActivityCreatePageData(object):NEWLINENEWLINE def __init__(self):NEWLINE self._name = NoneNEWLINE self._schema_data = NoneNEWLINENEWLINE @propertyNEWLINE def name(self):NEWLINE return self._nameNEWLINENEWLINE @name.setterNEWLINE def name(self, value):NEWLINE self._name = valueNEWLINE @propertyNEWLINE def schema_data(self):NEWLINE return self._schema_dataNEWLINENEWLINE @schema_data.setterNEWLINE def schema_data(self, value):NEWLINE self._schema_data = valueNEWLINENEWLINENEWLINE def to_alipay_dict(self):NEWLINE params = dict()NEWLINE if self.name:NEWLINE if hasattr(self.name, 'to_alipay_dict'):NEWLINE params['name'] = self.name.to_alipay_dict()NEWLINE else:NEWLINE params['name'] = self.nameNEWLINE if self.schema_data:NEWLINE if hasattr(self.schema_data, 'to_alipay_dict'):NEWLINE params['schema_data'] = self.schema_data.to_alipay_dict()NEWLINE else:NEWLINE params['schema_data'] = self.schema_dataNEWLINE return paramsNEWLINENEWLINE @staticmethodNEWLINE def from_alipay_dict(d):NEWLINE if not d:NEWLINE return NoneNEWLINE o = FengdieActivityCreatePageData()NEWLINE if 'name' in d:NEWLINE o.name = d['name']NEWLINE if 'schema_data' in d:NEWLINE o.schema_data = d['schema_data']NEWLINE return oNEWLINENEWLINENEWLINE |
import datetimeNEWLINEimport mockNEWLINEimport pytestNEWLINEfrom urllib.error import HTTPErrorNEWLINENEWLINEfrom marshmallow.exceptions import ValidationErrorNEWLINEimport responsesNEWLINEfrom werkzeug.security import generate_password_hash, check_password_hashNEWLINENEWLINEfrom database.models import AttestationTypesNEWLINEfrom database.models import AttestationNEWLINEfrom logic.attestation_service import (NEWLINE    VerificationService,NEWLINE    VerificationServiceResponseNEWLINE)NEWLINEfrom logic.attestation_service import CLAIM_TYPESNEWLINEfrom logic.attestation_service import twitter_access_token_urlNEWLINEfrom logic.attestation_service import twitter_request_token_urlNEWLINEfrom logic.service_utils import (NEWLINE    AirbnbVerificationError,NEWLINE    EmailVerificationError,NEWLINE    FacebookVerificationError,NEWLINE    PhoneVerificationError,NEWLINE    TwitterVerificationError,NEWLINE)NEWLINEfrom tests.helpers.eth_utils import sample_eth_address, str_ethNEWLINENEWLINENEWLINE# Assumed value: '0x' plus 130 hex characters of a 65-byte ECDSA signature.NEWLINESIGNATURE_LENGTH = 132NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_send_phone_verification_success():NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        'https://api.authy.com/protected/json/phones/verification/start',NEWLINE        status=200NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '12341234',NEWLINE        'method': 'sms',NEWLINE        'locale': NoneNEWLINE    }NEWLINE    response = VerificationService.send_phone_verification(**args)NEWLINE    assert isinstance(response, VerificationServiceResponse)NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_send_phone_verification_invalid_number():NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        'https://api.authy.com/protected/json/phones/verification/start',NEWLINE        json={'error_code': '60033'},NEWLINE        status=400NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '1234',NEWLINE        'method': 'sms',NEWLINE        'locale': NoneNEWLINE    }NEWLINE    with pytest.raises(ValidationError) as validation_err:NEWLINE        VerificationService.send_phone_verification(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]) == 'Phone number is invalid.'NEWLINE    assert(validation_err.value.field_names[0]) == 'phone'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_send_phone_verification_cant_sms_landline():NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        'https://api.authy.com/protected/json/phones/verification/start',NEWLINE        json={'error_code': '60082'},NEWLINE        status=403NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '1234',NEWLINE        'method': 'sms',NEWLINE        'locale': NoneNEWLINE    }NEWLINE    with pytest.raises(ValidationError) as validation_err:NEWLINE        VerificationService.send_phone_verification(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]) == 'Cannot send SMS to landline.'NEWLINE    assert(validation_err.value.field_names[0]) == 'phone'NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_send_phone_verification_twilio_error():NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        'https://api.authy.com/protected/json/phones/verification/start',NEWLINE        json={'error_code': '60060'},  # Account is suspendedNEWLINE        status=503NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '1234',NEWLINE        'method': 'sms',NEWLINE        'locale': NoneNEWLINE    }NEWLINE    with pytest.raises(PhoneVerificationError) as service_err:NEWLINE        VerificationService.send_phone_verification(**args)NEWLINENEWLINE    assert(str(service_err.value)) == \NEWLINE        'Could not send verification code. Please try again shortly.'NEWLINENEWLINENEWLINE
@responses.activateNEWLINEdef test_verify_phone_valid_code(app):NEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        'https://api.authy.com/protected/json/phones/verification/check',NEWLINE        json={NEWLINE            'message': 'Verification code is correct.',NEWLINE            'success': TrueNEWLINE        }NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '12341234',NEWLINE        'code': '123456'NEWLINE    }NEWLINE    with app.test_request_context():NEWLINE        response = VerificationService.verify_phone(**args)NEWLINE    assert isinstance(response, VerificationServiceResponse)NEWLINENEWLINE    assert len(response.data['signature']) == SIGNATURE_LENGTHNEWLINE    assert response.data['claim_type'] == CLAIM_TYPES['phone']NEWLINE    assert response.data['data'] == 'phone verified'NEWLINENEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 1NEWLINE    assert(attestations[0].method) == AttestationTypes.PHONENEWLINE    assert(attestations[0].value) == "1 12341234"NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_verify_phone_expired_code():NEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        'https://api.authy.com/protected/json/phones/verification/check',NEWLINE        json={'error_code': '60023'},  # No pending verificationNEWLINE        status=404NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '12341234',NEWLINE        'code': '123456'NEWLINE    }NEWLINE    with pytest.raises(ValidationError) as validation_err:NEWLINE        VerificationService.verify_phone(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]NEWLINE           ) == 'Verification code has expired.'NEWLINE    assert(validation_err.value.field_names[0]) == 'code'NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_verify_phone_invalid_code():NEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        'https://api.authy.com/protected/json/phones/verification/check',NEWLINE        json={'error_code': '60022'},  # Incorrect verification codeNEWLINE        status=401NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'country_calling_code': '1',NEWLINE        'phone': '12341234',NEWLINE        'code': 'garbage'NEWLINE    }NEWLINE    with pytest.raises(ValidationError) as validation_err:NEWLINE        VerificationService.verify_phone(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]NEWLINE           ) == 'Verification code is incorrect.'NEWLINE    assert(validation_err.value.field_names[0]) == 'code'NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service._send_email_using_sendgrid')NEWLINE@mock.patch('logic.attestation_service.datetime')NEWLINEdef test_send_email_verification(NEWLINE        mock_datetime,NEWLINE        mock_send_email_using_sendgrid):NEWLINE    mock_send_email_using_sendgrid.return_value = TrueNEWLINENEWLINE    now = datetime.datetime.utcnow()NEWLINE    expire_in = datetime.timedelta(minutes=30)NEWLINE    mock_datetime.datetime.utcnow.return_value = nowNEWLINE    mock_datetime.timedelta.return_value = expire_inNEWLINENEWLINE    # Placeholder address (example.com is reserved for documentation).NEWLINE    email = 'test@example.com'NEWLINE    with mock.patch('logic.attestation_service.session', dict()) as session:NEWLINE        response = VerificationService.send_email_verification(email)NEWLINE        assert isinstance(response, VerificationServiceResponse)NEWLINE        assert 'email_attestation' in sessionNEWLINE        assert len(session['email_attestation']['code']) == 6NEWLINE        assert session['email_attestation']['expiry'] == now + expire_inNEWLINE        assert check_password_hash(NEWLINE            session['email_attestation']['email'], emailNEWLINE        )NEWLINENEWLINENEWLINE
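# Aside: the session stores only a salted hash of the address, via theNEWLINE# werkzeug helpers imported above; a quick round-trip sketch:NEWLINE#NEWLINE#   >>> h = generate_password_hash('test@example.com')NEWLINE#   >>> check_password_hash(h, 'test@example.com')NEWLINE#   TrueNEWLINENEWLINENEWLINE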
@mock.patch('logic.attestation_service._send_email_using_sendgrid')NEWLINEdef test_send_email_verification_sendgrid_error(NEWLINE        mock_send_email_using_sendgrid):NEWLINE    mock_send_email_using_sendgrid.side_effect = AttributeErrorNEWLINENEWLINE    with mock.patch('logic.attestation_service.session', dict()):NEWLINE        with pytest.raises(EmailVerificationError) as service_err:NEWLINE            VerificationService.send_email_verification('test@example.com')NEWLINENEWLINE    assert(str(service_err.value)) == \NEWLINE        'Could not send verification code. Please try again shortly.'NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.session')NEWLINEdef test_verify_email_valid_code(mock_session, app):NEWLINE    session_dict = {NEWLINE        'email_attestation': {NEWLINE            'email': generate_password_hash('test@example.com'),NEWLINE            'code': '12345',NEWLINE            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)NEWLINE        }NEWLINE    }NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'email': 'test@example.com',NEWLINE        'code': '12345'NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with app.test_request_context():NEWLINE            response = VerificationService.verify_email(**args)NEWLINENEWLINE    assert isinstance(response, VerificationServiceResponse)NEWLINENEWLINE    assert len(response.data['signature']) == SIGNATURE_LENGTHNEWLINE    assert response.data['claim_type'] == CLAIM_TYPES['email']NEWLINE    assert response.data['data'] == 'email verified'NEWLINENEWLINE    # Verify attestation stored in databaseNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 1NEWLINE    assert(attestations[0].method) == AttestationTypes.EMAILNEWLINE    assert(attestations[0].value) == "test@example.com"NEWLINENEWLINENEWLINEdef test_verify_email_expired_code():NEWLINE    # Mock a session object with an expiry time in the pastNEWLINE    session_dict = {NEWLINE        'email_attestation': {NEWLINE            'email': generate_password_hash('test@example.com'),NEWLINE            'code': '12345',NEWLINE            'expiry': datetime.datetime.utcnow() - datetime.timedelta(minutes=30)NEWLINE        }NEWLINE    }NEWLINENEWLINE    args = {NEWLINE        'email': 'test@example.com',NEWLINE        'code': '12345',NEWLINE        'eth_address': str_eth(sample_eth_address)NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with pytest.raises(ValidationError) as validation_err:NEWLINE            VerificationService.verify_email(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]NEWLINE           ) == 'Verification code has expired.'NEWLINE    assert(validation_err.value.field_names[0]) == 'code'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.session')NEWLINEdef test_verify_email_invalid_code(mock_session):NEWLINE    session_dict = {NEWLINE        'email_attestation': {NEWLINE            'email': generate_password_hash('test@example.com'),NEWLINE            'code': '12345',NEWLINE            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)NEWLINE        }NEWLINE    }NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'email': 'test@example.com',NEWLINE        'code': '54321'NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with pytest.raises(ValidationError) as validation_err:NEWLINE            VerificationService.verify_email(**args)NEWLINENEWLINE    assert(validation_err.value.messages[0]NEWLINE           ) == 'Verification code is incorrect.'NEWLINE    assert(validation_err.value.field_names[0]) == 'code'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE
def test_verify_email_no_verification_sent():NEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        'email': 'test@example.com',NEWLINE        'code': '54321'NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', dict()):NEWLINE        with pytest.raises(EmailVerificationError) as verification_err:NEWLINE            VerificationService.verify_email(**args)NEWLINENEWLINE    assert(verification_err.value.message) == \NEWLINE        'No verification code was found.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINEdef test_verify_email_invalid_email():NEWLINE    session_dict = {NEWLINE        'email_attestation': {NEWLINE            'email': generate_password_hash('test@example.com'),NEWLINE            'code': '12345',NEWLINE            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)NEWLINE        }NEWLINE    }NEWLINENEWLINE    args = {NEWLINE        'eth_address': str_eth(sample_eth_address),NEWLINE        # Deliberately different from the address hashed in the session.NEWLINE        'email': 'other@example.com',NEWLINE        'code': '54321'NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with pytest.raises(EmailVerificationError) as verification_err:NEWLINE            VerificationService.verify_email(**args)NEWLINENEWLINE    assert(verification_err.value.message) == \NEWLINE        'No verification code was found for that email.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINEdef test_facebook_auth_url():NEWLINE    resp = VerificationService.facebook_auth_url()NEWLINE    resp_data = resp.dataNEWLINE    assert resp_data['url'] == (NEWLINE        'https://www.facebook.com/v2.12/dialog/oauth?client_id'NEWLINE        '=facebook-client-id&redirect_uri'NEWLINE        '=https://testhost.com/redirects/facebook/'NEWLINE    )NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_verify_facebook_valid_code(app):NEWLINE    auth_url = 'https://graph.facebook.com/v2.12/oauth/access_token' + \NEWLINE        '?client_id=facebook-client-id' + \NEWLINE        '&client_secret=facebook-client-secret' + \NEWLINE        '&redirect_uri=https%3A%2F%2Ftesthost.com%2Fredirects%2Ffacebook%2F' + \NEWLINE        '&code=abcde12345'NEWLINE    verify_url = 'https://graph.facebook.com/me?access_token=12345'NEWLINENEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        auth_url,NEWLINE        json={'access_token': 12345},NEWLINE        status=200NEWLINE    )NEWLINENEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        verify_url,NEWLINE        json={'name': 'Origin Protocol'},NEWLINE        status=200NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        'code': 'abcde12345'NEWLINE    }NEWLINENEWLINE    with app.test_request_context():NEWLINE        verification_response = VerificationService.verify_facebook(**args)NEWLINE    assert isinstance(verification_response, VerificationServiceResponse)NEWLINE    assert len(verification_response.data['signature']) == SIGNATURE_LENGTHNEWLINE    assert verification_response.data['claim_type'] == CLAIM_TYPES['facebook']NEWLINE    assert verification_response.data['data'] == 'facebook verified'NEWLINENEWLINE    # Verify attestation stored in databaseNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 1NEWLINE    assert(attestations[0].method) == AttestationTypes.FACEBOOKNEWLINE    assert(attestations[0].value) == 'Origin Protocol'NEWLINENEWLINENEWLINE
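# The encoded redirect_uri in the stubbed URLs above is plain percent-encoding;NEWLINE# with the standard library, for example:NEWLINE#NEWLINE#   >>> from urllib.parse import quoteNEWLINE#   >>> quote('https://testhost.com/redirects/facebook/', safe='')NEWLINE#   'https%3A%2F%2Ftesthost.com%2Fredirects%2Ffacebook%2F'NEWLINENEWLINENEWLINE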
@responses.activateNEWLINEdef test_verify_facebook_invalid_code():NEWLINE    auth_url = 'https://graph.facebook.com/v2.12/oauth/access_token' + \NEWLINE        '?client_id=facebook-client-id' + \NEWLINE        '&client_secret=facebook-client-secret' + \NEWLINE        '&redirect_uri=https%3A%2F%2Ftesthost.com%2Fredirects%2Ffacebook%2F' + \NEWLINE        '&code=bananas'NEWLINENEWLINE    responses.add(NEWLINE        responses.GET,NEWLINE        auth_url,NEWLINE        json={'error': 'invalid'},NEWLINE        status=403NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        'code': 'bananas'NEWLINE    }NEWLINENEWLINE    with pytest.raises(FacebookVerificationError) as service_err:NEWLINE        VerificationService.verify_facebook(**args)NEWLINENEWLINE    assert str(service_err.value) == 'The code you provided is invalid.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@responses.activateNEWLINEdef test_twitter_auth_url(app):NEWLINE    response_content = b'oauth_token=peaches&oauth_token_secret=pears'NEWLINENEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        twitter_request_token_url,NEWLINE        body=response_content,NEWLINE        status=200NEWLINE    )NEWLINENEWLINE    with app.test_request_context():NEWLINE        verification_response = VerificationService.twitter_auth_url()NEWLINE    assert isinstance(verification_response, VerificationServiceResponse)NEWLINE    assert verification_response.data['url'] == (NEWLINE        'https://api.twitter.com/oauth/authenticate?oauth_token=peaches'NEWLINE    )NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.session')NEWLINE@responses.activateNEWLINEdef test_verify_twitter_valid_code(mock_session, app):NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        twitter_access_token_url,NEWLINE        body=b'screen_name=originprotocol',NEWLINE        status=200NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        'oauth_verifier': 'blueberries'NEWLINE    }NEWLINENEWLINE    session_dict = {NEWLINE        'request_token': {NEWLINE            'oauth_token': '1234',NEWLINE            'oauth_token_secret': '5678'NEWLINE        }NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with app.test_request_context():NEWLINE            verification_response = VerificationService.verify_twitter(**args)NEWLINENEWLINE    assert isinstance(verification_response, VerificationServiceResponse)NEWLINENEWLINE    assert len(verification_response.data['signature']) == SIGNATURE_LENGTHNEWLINE    assert verification_response.data['claim_type'] == CLAIM_TYPES['twitter']NEWLINE    assert verification_response.data['data'] == 'twitter verified'NEWLINENEWLINE    # Verify attestation stored in databaseNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 1NEWLINE    assert(attestations[0].method) == AttestationTypes.TWITTERNEWLINE    assert(attestations[0].value) == 'originprotocol'NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.session')NEWLINE@responses.activateNEWLINEdef test_verify_twitter_invalid_verifier(mock_session, app):NEWLINE    responses.add(NEWLINE        responses.POST,NEWLINE        twitter_access_token_url,NEWLINE        status=401NEWLINE    )NEWLINENEWLINE    args = {NEWLINE        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        'oauth_verifier': 'pineapples'NEWLINE    }NEWLINENEWLINE    session_dict = {NEWLINE        'request_token': {NEWLINE            'oauth_token': '1234',NEWLINE            'oauth_token_secret': '5678'NEWLINE        }NEWLINE    }NEWLINENEWLINE    with mock.patch('logic.attestation_service.session', session_dict):NEWLINE        with pytest.raises(TwitterVerificationError) as service_err:NEWLINE            with app.test_request_context():NEWLINE                VerificationService.verify_twitter(**args)NEWLINENEWLINE    assert str(service_err.value) == 'The verifier you provided is invalid.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE
@mock.patch('logic.attestation_service.requests')NEWLINE@mock.patch('logic.attestation_service.session')NEWLINEdef test_verify_twitter_invalid_session(mock_session, mock_requests):NEWLINE    args = {NEWLINE        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        'oauth_verifier': 'pineapples'NEWLINE    }NEWLINENEWLINE    with pytest.raises(TwitterVerificationError) as service_err:NEWLINE        VerificationService.verify_twitter(**args)NEWLINENEWLINE    assert str(service_err.value) == 'Session not found.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINEdef test_generate_airbnb_verification_code():NEWLINE    resp = VerificationService.generate_airbnb_verification_code(NEWLINE        '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE        '123456'NEWLINE    )NEWLINE    assert isinstance(resp, VerificationServiceResponse)NEWLINENEWLINE    assert resp.data['code'] == "art brick aspect accident brass betray antenna"NEWLINENEWLINENEWLINEdef test_generate_airbnb_verification_code_incorrect_user_id_format():NEWLINE    with pytest.raises(ValidationError) as validation_error:NEWLINE        VerificationService.generate_airbnb_verification_code(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            '12a34'NEWLINE        )NEWLINENEWLINE    assert str(validation_error.value) == 'AirbnbUserId should be a number.'NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.urlopen')NEWLINEdef test_verify_airbnb(mock_urllib_request, app):NEWLINE    mock_urllib_request.return_value.read.return_value = """NEWLINE        <html><div>NEWLINE        Airbnb profile descriptionNEWLINE        Origin verification code: art brick aspect accident brass betray antennaNEWLINE        some more profile descriptionNEWLINE        </div></html>""".encode('utf-8')NEWLINE    airbnbUserId = "123456"NEWLINENEWLINE    with app.test_request_context():NEWLINE        verification_response = VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            airbnbUserIdNEWLINE        )NEWLINE    assert isinstance(verification_response, VerificationServiceResponse)NEWLINENEWLINE    assert len(verification_response.data['signature']) == SIGNATURE_LENGTHNEWLINE    assert verification_response.data['claim_type'] == CLAIM_TYPES['airbnb']NEWLINE    assert verification_response.data['data'] == 'airbnbUserId:' + airbnbUserIdNEWLINENEWLINE    # Verify attestation stored in databaseNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 1NEWLINE    assert(attestations[0].method) == AttestationTypes.AIRBNBNEWLINE    assert(attestations[0].value) == "123456"NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.urlopen')NEWLINEdef test_verify_airbnb_verification_code_missing(mock_urllib_request):NEWLINE    mock_urllib_request.return_value.read.return_value = """NEWLINE        <html><div>NEWLINE        Airbnb profile description some more profile descriptionNEWLINE        </div></html>""".encode('utf-8')NEWLINENEWLINE    with pytest.raises(AirbnbVerificationError) as service_err:NEWLINE        VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            "123456"NEWLINE        )NEWLINENEWLINE    assert str(service_err.value) == "Origin verification code: art brick aspect " \NEWLINE        + "accident brass betray antenna has not been found in user's Airbnb profile."NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE
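# Sketch of the check these tests exercise (hypothetical helper, not theNEWLINE# service's actual implementation): fetch the public profile page and lookNEWLINE# for the expected verification phrase in its text.NEWLINE#NEWLINE# def profile_contains_code(html_bytes, code):NEWLINE#     return 'Origin verification code: ' + code in html_bytes.decode('utf-8')NEWLINENEWLINENEWLINE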
@mock.patch('logic.attestation_service.urlopen')NEWLINEdef test_verify_airbnb_verification_code_incorrect(mock_urllib_request):NEWLINE    mock_urllib_request.return_value.read.return_value = """NEWLINE        <html><div>NEWLINE        Airbnb profile descriptionNEWLINE        Origin verification code: art brick aspect pimpmobileNEWLINE        some more profile descriptionNEWLINE        </div></html>""".encode('utf-8')NEWLINENEWLINE    with pytest.raises(AirbnbVerificationError) as service_err:NEWLINE        VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            "123456"NEWLINE        )NEWLINENEWLINE    assert str(service_err.value) == "Origin verification code: art brick aspect " \NEWLINE        + "accident brass betray antenna has not been found in user's Airbnb profile."NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.urlopen')NEWLINEdef test_verify_airbnb_verification_code_incorrect_user_id_format(NEWLINE        mock_urllib_request):NEWLINE    mock_urllib_request.return_value.read.return_value = """NEWLINE        <html><div>NEWLINE        Airbnb profile descriptionNEWLINE        Origin verification code: art brick aspect accident brass betray antennaNEWLINE        some more profile descriptionNEWLINE        </div></html>""".encode('utf-8')NEWLINENEWLINE    with pytest.raises(ValidationError) as validation_error:NEWLINE        VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            "12a34"NEWLINE        )NEWLINENEWLINE    assert str(validation_error.value) == 'AirbnbUserId should be a number.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.urlopen', side_effect=HTTPError(NEWLINE    'https://www.airbnb.com/users/show/99999999999999999',NEWLINE    404,NEWLINE    "User not found",NEWLINE    {},NEWLINE    {}NEWLINE))NEWLINEdef test_verify_airbnb_verification_code_non_existing_user(NEWLINE        mock_urllib_request):NEWLINE    with pytest.raises(AirbnbVerificationError) as service_err:NEWLINE        VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            "99999999999999999"NEWLINE        )NEWLINENEWLINE    assert str(NEWLINE        service_err.value) == 'Airbnb user id: 99999999999999999 not found.'NEWLINENEWLINE    # Verify attestation not storedNEWLINE    attestations = Attestation.query.all()NEWLINE    assert(len(attestations)) == 0NEWLINENEWLINENEWLINE@mock.patch('logic.attestation_service.urlopen', side_effect=HTTPError(NEWLINE    'https://www.airbnb.com/users/show/123',NEWLINE    500,NEWLINE    "Internal server error",NEWLINE    {},NEWLINE    {}NEWLINE))NEWLINEdef test_verify_airbnb_verification_code_internal_server_error(NEWLINE        mock_urllib_request):NEWLINE    with pytest.raises(AirbnbVerificationError) as service_err:NEWLINE        VerificationService.verify_airbnb(NEWLINE            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',NEWLINE            "123"NEWLINE        )NEWLINENEWLINE    assert str(service_err.value) == "Can not fetch user's Airbnb profile."NEWLINE |
import sysNEWLINEimport importlib.resourcesNEWLINEimport pickleNEWLINEimport argparseNEWLINEimport reNEWLINEfrom contextlib import contextmanagerNEWLINEfrom collections import CounterNEWLINEfrom apycula import chipdbNEWLINENEWLINEclass Bba(object):NEWLINENEWLINE def __init__(self, file):NEWLINE self.file = fileNEWLINE self.block_idx = Counter()NEWLINENEWLINE def __getattr__(self, attr):NEWLINE def write_value(val):NEWLINE self.file.write(f"{attr} {val}\n")NEWLINE return write_valueNEWLINENEWLINE def str(self, val, sep="|"):NEWLINE self.file.write(f"str {sep}{val}{sep}\n")NEWLINENEWLINE @contextmanagerNEWLINE def block(self, prefix="block"):NEWLINE idx = self.block_idx[prefix]NEWLINE self.block_idx.update([prefix])NEWLINE name = f"{prefix}_{idx}"NEWLINE self.push(name)NEWLINE self.label(name)NEWLINE try:NEWLINE yield nameNEWLINE finally:NEWLINE self.pop(name)NEWLINENEWLINEconstids = ['']NEWLINEids = []NEWLINEdef id_string(s):NEWLINE try:NEWLINE return constids.index(s)NEWLINE except ValueError:NEWLINE passNEWLINE try:NEWLINE return len(constids)+ids.index(s)NEWLINE except ValueError:NEWLINE ids.append(s)NEWLINE return len(constids)+len(ids)-1NEWLINENEWLINEdef id_strings(b):NEWLINE with b.block('idstrings') as blk:NEWLINE for s in ids:NEWLINE b.str(s)NEWLINE b.u16(len(constids))NEWLINE b.u16(len(ids))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_pips(b, pips):NEWLINE num = 0NEWLINE with b.block("pips") as blk:NEWLINE for dest, srcs in pips.items():NEWLINE for src in srcs:NEWLINE num += 1NEWLINE b.u16(id_string(dest))NEWLINE b.u16(id_string(src))NEWLINE b.u32(num)NEWLINE b.ref(blk)NEWLINENEWLINEdef write_bels(b, bels):NEWLINE with b.block("bels") as blk:NEWLINE for typ, bel in bels.items():NEWLINE if bel.simplified_iob:NEWLINE b.u16(id_string(f'{typ}S'))NEWLINE else:NEWLINE b.u16(id_string(typ))NEWLINE with b.block("portmap") as port_blk:NEWLINE for dest, src in bel.portmap.items():NEWLINE b.u16(id_string(dest))NEWLINE b.u16(id_string(src))NEWLINE b.u16(len(bel.portmap))NEWLINE b.ref(port_blk)NEWLINENEWLINENEWLINE b.u32(len(bels))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_aliases(b, aliases):NEWLINE with b.block('aliases') as blk:NEWLINE for dest, src in aliases.items():NEWLINE b.u16(id_string(dest))NEWLINE b.u16(id_string(src))NEWLINE b.u32(len(aliases))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_tile(b, tile):NEWLINE with b.block('tile') as blk:NEWLINE write_bels(b, tile.bels)NEWLINE write_pips(b, tile.pips)NEWLINE write_pips(b, tile.clock_pips)NEWLINE write_aliases(b, tile.aliases)NEWLINE return blkNEWLINENEWLINEdef write_grid(b, grid):NEWLINE tiles = {}NEWLINE with b.block('grid') as grid_block:NEWLINE for row in grid:NEWLINE for tile in row:NEWLINE if id(tile) in tiles:NEWLINE b.ref(tiles[id(tile)])NEWLINE else:NEWLINE blk = write_tile(b, tile)NEWLINE tiles[id(tile)] = blkNEWLINE b.ref(blk)NEWLINE b.ref(grid_block)NEWLINENEWLINENEWLINEdef write_global_aliases(b, db):NEWLINE with b.block('aliases') as blk:NEWLINE aliases = sorted(db.aliases.items(),NEWLINE key=lambda i: (i[0][0], i[0][1], id_string(i[0][2])))NEWLINE for (drow, dcol, dest), (srow, scol, src) in aliases:NEWLINE b.u16(drow)NEWLINE b.u16(dcol)NEWLINE b.u16(id_string(dest))NEWLINE b.u16(srow)NEWLINE b.u16(scol)NEWLINE b.u16(id_string(src))NEWLINE b.u32(len(db.aliases))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_timing(b, timing):NEWLINE with b.block('timing') as blk:NEWLINE for speed, groups in timing.items():NEWLINE b.u32(id_string(speed))NEWLINE with b.block('timing_group') as tg:NEWLINE for group, types in groups.items():NEWLINE 
b.u32(id_string(group))NEWLINE with b.block('timing_types') as tt:NEWLINE for name, items in types.items():NEWLINE try:NEWLINE items[0] # QUACKING THE DUCKNEWLINE b.u32(id_string(name))NEWLINE for item in items:NEWLINE b.u32(int(item*1000))NEWLINE except TypeError:NEWLINE passNEWLINE b.u32(len(types))NEWLINE b.ref(tt)NEWLINE b.u32(len(groups))NEWLINE b.ref(tg)NEWLINE b.u32(len(timing))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_partnumber_packages(b, db):NEWLINE with b.block("partnumber_packages") as blk:NEWLINE for partnumber, pkg_rec in db.packages.items():NEWLINE pkg, device, speed = pkg_recNEWLINE b.u32(id_string(partnumber))NEWLINE b.u32(id_string(pkg))NEWLINE b.u32(id_string(device))NEWLINE b.u32(id_string(speed))NEWLINE b.u32(len(db.packages))NEWLINE b.ref(blk)NEWLINENEWLINEpin_re = re.compile(r"IO([TBRL])(\d+)([A-Z])")NEWLINEdef iob2bel(db, name):NEWLINE banks = {'T': [(1, n) for n in range(1, db.cols)],NEWLINE 'B': [(db.rows, n) for n in range(1, db.cols)],NEWLINE 'L': [(n, 1) for n in range(1, db.rows)],NEWLINE 'R': [(n, db.cols) for n in range(1, db.rows)]}NEWLINE side, num, pin = pin_re.match(name).groups()NEWLINE row, col = banks[side][int(num)-1]NEWLINE return f"R{row}C{col}_IOB{pin}"NEWLINENEWLINEdef write_pinout(b, db):NEWLINE with b.block("variants") as blk:NEWLINE for device, pkgs in db.pinout.items():NEWLINE b.u32(id_string(device))NEWLINE with b.block("packages") as pkgblk:NEWLINE for pkg, pins in pkgs.items():NEWLINE b.u32(id_string(pkg))NEWLINE with b.block("pins") as pinblk:NEWLINE for num, loc in pins.items():NEWLINE b.u16(id_string(num))NEWLINE b.u16(id_string(iob2bel(db, loc)))NEWLINE b.u32(len(pins))NEWLINE b.ref(pinblk)NEWLINE b.u32(len(pkgs))NEWLINE b.ref(pkgblk)NEWLINE b.u32(len(db.pinout))NEWLINE b.ref(blk)NEWLINENEWLINEdef write_chipdb(db, f, device):NEWLINE cdev=device.replace('-', '_')NEWLINE b = Bba(f)NEWLINE b.pre('#include "nextpnr.h"')NEWLINE b.pre('#include "embed.h"')NEWLINE b.pre('NEXTPNR_NAMESPACE_BEGIN')NEWLINE with b.block(f'chipdb_{cdev}') as blk:NEWLINE b.str(device)NEWLINE b.u32(1) # versionNEWLINE b.u16(db.rows)NEWLINE b.u16(db.cols)NEWLINE write_grid(b, db.grid)NEWLINE write_global_aliases(b, db)NEWLINE write_timing(b, db.timing)NEWLINE write_partnumber_packages(b, db)NEWLINE write_pinout(b, db)NEWLINE id_strings(b)NEWLINE b.post(f'EmbeddedFile chipdb_file_{cdev}("gowin/chipdb-{device}.bin", {blk});')NEWLINE b.post('NEXTPNR_NAMESPACE_END')NEWLINENEWLINEdef read_constids(f):NEWLINE xre = re.compile(r"X\((.*)\)")NEWLINE for line in f:NEWLINE m = xre.match(line)NEWLINE if m:NEWLINE constids.append(m.group(1))NEWLINE return idsNEWLINENEWLINENEWLINEdef main():NEWLINE parser = argparse.ArgumentParser(description='Make Gowin BBA')NEWLINE parser.add_argument('-d', '--device', required=True)NEWLINE parser.add_argument('-i', '--constids', type=argparse.FileType('r'), default=sys.stdin)NEWLINE parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout)NEWLINENEWLINE args = parser.parse_args()NEWLINE read_constids(args.constids)NEWLINE with importlib.resources.open_binary("apycula", f"{args.device}.pickle") as f:NEWLINE db = pickle.load(f)NEWLINE write_chipdb(db, args.output, args.device)NEWLINENEWLINEif __name__ == "__main__":NEWLINE main()NEWLINE |
#!/usr/bin/env python3NEWLINE# coding=utf-8NEWLINENEWLINEimport numpy as npNEWLINEimport pandas as pdNEWLINENEWLINENEWLINEdef show_predict_probability(frame):NEWLINE x1 = frame['x1']NEWLINE x2 = frame['x2']NEWLINE probability = frame['probabilities']NEWLINE class1_x = [x1[i] for i, x in enumerate(probability) if x >= 0.5]NEWLINE class1_y = [x2[i] for i, x in enumerate(probability) if x >= 0.5]NEWLINE class2_x = [x1[i] for i, x in enumerate(probability) if x < 0.5]NEWLINE class2_y = [x2[i] for i, x in enumerate(probability) if x < 0.5]NEWLINE print('class1_x = \n %s' % class1_x)NEWLINE print('class1_y = \n %s' % class1_y)NEWLINE print('class2_x = \n %s' % class2_x)NEWLINE print('class2_y = \n %s' % class2_y)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE frame = pd.DataFrame()NEWLINE n = 5NEWLINE npx1 = np.linspace(0, 9, n)NEWLINE npx2 = np.linspace(100, 109, n)NEWLINE X1, X2 = np.meshgrid(npx1, npx2)NEWLINE frame['x1'] = np.reshape(X1, n * n)NEWLINE frame['x2'] = np.reshape(X2, n * n)NEWLINE frame['probabilities'] = np.random.rand(n * n)NEWLINE print(frame)NEWLINE show_predict_probability(frame)NEWLINE |
NEWLINEimport sys, jsonNEWLINEimport cv2NEWLINEimport torchNEWLINENEWLINEPATH_EL = "../entity-linking/"NEWLINEsys.path.insert(0, PATH_EL)NEWLINENEWLINEimport clickNEWLINEimport tqdmNEWLINEfrom pycorenlp import StanfordCoreNLPNEWLINENEWLINEfrom entitylinking import core as ELNEWLINEfrom entitylinking.core.sentence import SentenceEncoderNEWLINENEWLINENEWLINEcorenlp = StanfordCoreNLP('http://semanticparsing:9000')NEWLINEcorenlp_properties = {NEWLINE    'annotators': 'tokenize, pos, ner',NEWLINE    'outputFormat': 'json'NEWLINE}NEWLINENEWLINEEL.candidate_retrieval.entity_linking_p['max.match.diff'] = 0NEWLINENEWLINEEL.mention_extraction.np_parser = EL.mention_extraction.NgramNpParser(NEWLINE    exclude_pos={".", "ORDINAL", "TIME", "PERCENT", "NUMBER"},NEWLINE    exclude_if_first={"WDT", "WP", "WP$", "WRB", "VBZ", "VB", "VBP"},NEWLINE    exclude_prefix={"IN", "DT", "CC", "POS"},NEWLINE    exclude_suffix={"IN", "DT", "CC", "JJ", "RB", "JJR", "JJS", "RBR", "RBS"},NEWLINE    exclude_alone={"IN", "DT", "PDT", "POS", "PRP", "PRP$", "CC", "TO",NEWLINE                   "VBZ", "VBD", "VBP", "VB", "VBG", "VBN",NEWLINE                   "JJ", "RB", "JJR", "JJS", "RBR", "RBS",NEWLINE                   "MD", "WDT", "WP", "WP$", "WRB"NEWLINE                   })NEWLINENEWLINENEWLINE@click.command()NEWLINE@click.argument('path_to_file')NEWLINE@click.argument('output_file')NEWLINEdef apply(path_to_file, output_file):NEWLINENEWLINE    entitylinker = EL.MLLinker(path_to_model="../entity-linking/trainedmodels/VectorModel_137.torchweights",NEWLINE                               confidence=0.01,NEWLINE                               num_candidates=3,NEWLINE                               max_mention_len=2)NEWLINENEWLINE    with open(path_to_file) as f:NEWLINE        input_data = [l.strip().split(",") for l in f.readlines()][1:]NEWLINENEWLINE    output_data = {}NEWLINE    for parts in tqdm.tqdm(input_data):NEWLINE        output_per_story = []NEWLINE        for i in range(1, 7):NEWLINE            s = parts[i]NEWLINE            sent = entitylinker.link_entities_in_sentence_obj(EL.sentence.Sentence(input_text=s))NEWLINE            sent.entities = [{k: e[k] for k in {'type', 'linkings', 'token_ids', 'poss', 'tokens', 'drop_score'}}NEWLINE                             for e in sent.entities if len(e['linkings']) > 0]NEWLINE            for e in sent.entities:NEWLINE                e['linkings'] = [(l.get('kbID'), l.get('label')) for l in e['linkings']]NEWLINE            output_per_story.append(sent)NEWLINE        output_data[parts[0]] = output_per_storyNEWLINE    with open(output_file, "w") as out:NEWLINE        json.dump(output_data, out, sort_keys=True, indent=4, cls=SentenceEncoder)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    apply()NEWLINE |
# coding: UTF-8NEWLINENEWLINEfrom __future__ import print_functionNEWLINEfrom configparser import ConfigParserNEWLINEfrom contextlib import contextmanagerNEWLINEimport osNEWLINEimport datetimeNEWLINEfrom os.path import basename, exists, dirname, join, expanduserNEWLINEimport sysNEWLINEimport subprocessNEWLINEimport timeNEWLINEimport loggingNEWLINEimport logging.configNEWLINEimport clickNEWLINEimport termcolorNEWLINEimport colorlogNEWLINEimport MySQLdbNEWLINENEWLINElogger = logging.getLogger('.utils')NEWLINEDEBUG_ENABLED = os.environ.get('SEAFILE_DOCKER_VERBOSE', '').lower() in ('true', '1', 'yes')NEWLINENEWLINENEWLINEdef eprint(*a, **kw):NEWLINE kw['file'] = sys.stderrNEWLINE print(*a, **kw)NEWLINENEWLINEdef identity(msg, *a, **kw):NEWLINE return msgNEWLINENEWLINEcolored = identity if not os.isatty(sys.stdin.fileno()) else termcolor.coloredNEWLINEred = lambda s: colored(s, 'red')NEWLINEgreen = lambda s: colored(s, 'green')NEWLINENEWLINEdef underlined(msg):NEWLINE return '\x1b[4m{}\x1b[0m'.format(msg)NEWLINENEWLINEdef sudo(*a, **kw):NEWLINE call('sudo ' + a[0], *a[1:], **kw)NEWLINENEWLINEdef _find_flag(args, *opts, **kw):NEWLINE is_flag = kw.get('is_flag', False)NEWLINE if is_flag:NEWLINE return any([opt in args for opt in opts])NEWLINE else:NEWLINE for opt in opts:NEWLINE try:NEWLINE return args[args.index(opt) + 1]NEWLINE except ValueError:NEWLINE passNEWLINENEWLINEdef call(*a, **kw):NEWLINE dry_run = kw.pop('dry_run', False)NEWLINE quiet = kw.pop('quiet', DEBUG_ENABLED)NEWLINE cwd = kw.get('cwd', os.getcwd())NEWLINE check_call = kw.pop('check_call', True)NEWLINE reduct_args = kw.pop('reduct_args', [])NEWLINE if not quiet:NEWLINE toprint = a[0]NEWLINE args = [x.strip('"') for x in a[0].split() if '=' not in x]NEWLINE for arg in reduct_args:NEWLINE value = _find_flag(args, arg)NEWLINE toprint = toprint.replace(value, '{}**reducted**'.format(value[:3]))NEWLINE logdbg('calling: ' + green(toprint))NEWLINE logdbg('cwd: ' + green(cwd))NEWLINE kw.setdefault('shell', True)NEWLINE if not dry_run:NEWLINE if check_call:NEWLINE return subprocess.check_call(*a, **kw)NEWLINE else:NEWLINE return subprocess.Popen(*a, **kw).wait()NEWLINENEWLINE@contextmanagerNEWLINEdef cd(path):NEWLINE path = expanduser(path)NEWLINE olddir = os.getcwd()NEWLINE os.chdir(path)NEWLINE try:NEWLINE yieldNEWLINE finally:NEWLINE os.chdir(olddir)NEWLINENEWLINEdef must_makedir(p):NEWLINE p = expanduser(p)NEWLINE if not exists(p):NEWLINE logger.info('created folder %s', p)NEWLINE os.makedirs(p)NEWLINE else:NEWLINE logger.debug('folder %s already exists', p)NEWLINENEWLINEdef setup_colorlog():NEWLINE logging.config.dictConfig({NEWLINE 'version': 1,NEWLINE 'disable_existing_loggers': False,NEWLINE 'formatters': {NEWLINE 'standard': {NEWLINE 'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'NEWLINE },NEWLINE 'colored': {NEWLINE '()': 'colorlog.ColoredFormatter',NEWLINE 'format': "%(log_color)s[%(asctime)s]%(reset)s %(blue)s%(message)s",NEWLINE 'datefmt': '%m/%d/%Y %H:%M:%S',NEWLINE },NEWLINE },NEWLINE 'handlers': {NEWLINE 'default': {NEWLINE 'level': 'INFO',NEWLINE 'formatter': 'colored',NEWLINE 'class': 'logging.StreamHandler',NEWLINE },NEWLINE },NEWLINE 'loggers': {NEWLINE '': {NEWLINE 'handlers': ['default'],NEWLINE 'level': 'INFO',NEWLINE 'propagate': TrueNEWLINE },NEWLINE 'django.request': {NEWLINE 'handlers': ['default'],NEWLINE 'level': 'WARN',NEWLINE 'propagate': FalseNEWLINE },NEWLINE }NEWLINE })NEWLINENEWLINE logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(NEWLINE 
logging.WARNING)NEWLINENEWLINENEWLINEdef setup_logging(level=logging.INFO):NEWLINE    kw = {NEWLINE        'format': '[%(asctime)s][%(module)s]: %(message)s',NEWLINE        'datefmt': '%m/%d/%Y %H:%M:%S',NEWLINE        'level': level,NEWLINE        'stream': sys.stdoutNEWLINE    }NEWLINENEWLINE    logging.basicConfig(**kw)NEWLINE    logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(NEWLINE        logging.WARNING)NEWLINENEWLINEdef get_process_cmd(pid, env=False):NEWLINE    env = 'e' if env else ''NEWLINE    try:NEWLINE        return subprocess.check_output('ps {} -o command {}'.format(env, pid),NEWLINE                                       shell=True).strip().splitlines()[1]NEWLINE    except Exception:NEWLINE        return NoneNEWLINENEWLINEdef get_match_pids(pattern):NEWLINE    pgrep_output = subprocess.check_output(NEWLINE        'pgrep -f "{}" || true'.format(pattern),NEWLINE        shell=True).strip()NEWLINE    return [int(pid) for pid in pgrep_output.splitlines()]NEWLINENEWLINEdef ask_for_confirm(msg):NEWLINE    confirm = click.prompt(msg, default='Y')NEWLINE    return confirm.lower() in ('y', 'yes')NEWLINENEWLINEdef confirm_command_to_run(cmd):NEWLINE    if ask_for_confirm('Run the command: {} ?'.format(green(cmd))):NEWLINE        call(cmd)NEWLINE    else:NEWLINE        sys.exit(1)NEWLINENEWLINEdef git_current_commit():NEWLINE    return get_command_output('git rev-parse --short HEAD').strip()NEWLINENEWLINEdef get_command_output(cmd):NEWLINE    shell = not isinstance(cmd, list)NEWLINE    return subprocess.check_output(cmd, shell=shell)NEWLINENEWLINEdef ask_yes_or_no(msg, prompt='', default=None):NEWLINE    print('\n' + msg + '\n')NEWLINE    while True:NEWLINE        answer = input(prompt + ' [yes/no] ').lower()NEWLINE        if not answer:NEWLINE            continueNEWLINENEWLINE        if answer not in ('yes', 'no', 'y', 'n'):NEWLINE            continueNEWLINENEWLINE        if answer in ('yes', 'y'):NEWLINE            return TrueNEWLINE        else:NEWLINE            return FalseNEWLINENEWLINEdef git_branch_exists(branch):NEWLINE    # check_call=False: a missing branch should yield False, not an exception.NEWLINE    return call('git rev-parse --short --verify {}'.format(branch),NEWLINE                check_call=False) == 0NEWLINENEWLINEdef to_unicode(s):NEWLINE    # On Python 3 only byte strings need decoding.NEWLINE    if isinstance(s, bytes):NEWLINE        return s.decode('utf-8')NEWLINE    else:NEWLINE        return sNEWLINENEWLINEdef to_utf8(s):NEWLINE    if isinstance(s, str):NEWLINE        return s.encode('utf-8')NEWLINE    else:NEWLINE        return sNEWLINENEWLINEdef git_commit_time(refspec):NEWLINE    return int(get_command_output('git log -1 --format="%ct" {}'.format(NEWLINE        refspec)).strip())NEWLINENEWLINEdef get_seafile_version():NEWLINE    return os.environ['SEAFILE_VERSION']NEWLINENEWLINEdef get_install_dir():NEWLINE    return join('/opt/seafile/' + get_conf('SEAFILE_SERVER', 'seafile-server') + '-{}'.format(get_seafile_version()))NEWLINENEWLINEdef get_script(script):NEWLINE    return join(get_install_dir(), script)NEWLINENEWLINENEWLINE_config = NoneNEWLINENEWLINEdef get_conf(key, default=None):NEWLINE    key = key.upper()NEWLINE    return os.environ.get(key, default)NEWLINENEWLINEdef _add_default_context(context):NEWLINE    default_context = {NEWLINE        'current_timestr': datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S'),NEWLINE    }NEWLINE    for k in default_context:NEWLINE        context.setdefault(k, default_context[k])NEWLINENEWLINEdef render_template(template, target, context):NEWLINE    from jinja2 import Environment, FileSystemLoaderNEWLINE    env = Environment(loader=FileSystemLoader(dirname(template)))NEWLINE    _add_default_context(context)NEWLINE    content = env.get_template(basename(template)).render(**context)NEWLINE    with open(target, 'w') as fp:NEWLINE        fp.write(content)NEWLINENEWLINE
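# Example use of render_template above (paths and context keys areNEWLINE# illustrative, not taken from a real deployment):NEWLINE#NEWLINE#   render_template('/templates/seafile.conf.template',NEWLINE#                   '/shared/seafile.conf',NEWLINE#                   {'db_host': '127.0.0.1'})NEWLINE#NEWLINE# Besides the caller's context, the template also sees `current_timestr`,NEWLINE# injected by _add_default_context.NEWLINENEWLINE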
def logdbg(msg):NEWLINE    if DEBUG_ENABLED:NEWLINE        msg = '[debug] ' + msgNEWLINE        loginfo(msg)NEWLINENEWLINEdef loginfo(msg):NEWLINE    msg = '[{}] {}'.format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), green(msg))NEWLINE    eprint(msg)NEWLINENEWLINEdef cert_has_valid_days(cert, days):NEWLINE    return TrueNEWLINENEWLINEdef get_version_stamp_file():NEWLINE    return '/shared/seafile/seafile-data/current_version'NEWLINENEWLINEdef read_version_stamp(fn=get_version_stamp_file()):NEWLINE    assert exists(fn), 'version stamp file {} does not exist!'.format(fn)NEWLINE    with open(fn, 'r') as fp:NEWLINE        return fp.read().strip()NEWLINENEWLINEdef update_version_stamp(version, fn=get_version_stamp_file()):NEWLINE    with open(fn, 'w') as fp:NEWLINE        fp.write(version + '\n')NEWLINENEWLINEdef wait_for_mysql():NEWLINE    db_host = get_conf('DB_HOST', '127.0.0.1')NEWLINE    db_user = 'root'NEWLINE    db_passwd = get_conf('DB_ROOT_PASSWD', '')NEWLINENEWLINE    while True:NEWLINE        try:NEWLINE            MySQLdb.connect(host=db_host, port=3306, user=db_user, passwd=db_passwd)NEWLINE        except Exception as e:NEWLINE            print('waiting for mysql server to be ready: %s' % e)NEWLINE            time.sleep(2)NEWLINE            continueNEWLINE        logdbg('mysql server is ready')NEWLINE        returnNEWLINENEWLINEdef wait_for_nginx():NEWLINE    returnNEWLINENEWLINEdef replace_file_pattern(fn, pattern, replacement):NEWLINE    with open(fn, 'r') as fp:NEWLINE        content = fp.read()NEWLINE    with open(fn, 'w') as fp:NEWLINE        fp.write(content.replace(pattern, replacement))NEWLINE |
# Copyright 1999-2021 Alibaba Group Holding Ltd.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEimport asyncioNEWLINEfrom typing import Dict, List, Optional, Tuple, UnionNEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom .... import oscar as moNEWLINEfrom ....core import tileNEWLINEfrom ....utils import build_fetchNEWLINEfrom ...core import NodeRoleNEWLINEfrom ...cluster import ClusterAPINEWLINEfrom ...meta import MetaAPINEWLINEfrom ..core import MutableTensorInfoNEWLINEfrom ..utils import (NEWLINE getitem_to_records,NEWLINE setitem_to_records,NEWLINE normalize_name,NEWLINE normalize_timestamp,NEWLINE)NEWLINEfrom ..worker import MutableTensorChunkActorNEWLINENEWLINENEWLINEclass MutableObjectManagerActor(mo.Actor):NEWLINE def __init__(self, session_id: str):NEWLINE self._session_id = session_idNEWLINE self._cluster_api: Optional[ClusterAPI] = NoneNEWLINENEWLINE self._mutable_objects = dict()NEWLINENEWLINE async def __post_create__(self):NEWLINE self._cluster_api = await ClusterAPI.create(self.address)NEWLINENEWLINE async def __pre_destroy__(self):NEWLINE await asyncio.gather(NEWLINE *[mo.destroy_actor(ref) for ref in self._mutable_objects.values()]NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def gen_uid(cls, session_id: str):NEWLINE return f"mutable-object-manager-{session_id}"NEWLINENEWLINE async def create_mutable_tensor(self, *args, name: Optional[str] = None, **kwargs):NEWLINE name = normalize_name(name)NEWLINE if name in self._mutable_objects:NEWLINE raise ValueError(f"Mutable tensor {name} already exists!")NEWLINENEWLINE workers: List[str] = list(NEWLINE await self._cluster_api.get_nodes_info(role=NodeRole.WORKER)NEWLINE )NEWLINENEWLINE tensor_ref = await mo.create_actor(NEWLINE MutableTensorActor,NEWLINE self._session_id,NEWLINE name,NEWLINE workers,NEWLINE *args,NEWLINE **kwargs,NEWLINE address=self.address,NEWLINE uid=MutableTensorActor.gen_uid(self._session_id, name),NEWLINE )NEWLINE self._mutable_objects[name] = tensor_refNEWLINE return tensor_refNEWLINENEWLINE async def get_mutable_tensor(self, name: str):NEWLINE tensor_ref = self._mutable_objects.get(name, None)NEWLINE if tensor_ref is None:NEWLINE raise ValueError(f"Mutable tensor {name} doesn't exist!")NEWLINE return tensor_refNEWLINENEWLINE async def seal_mutable_tensor(self, name: str, timestamp=None):NEWLINE tensor_ref = self._mutable_objects.get(name, None)NEWLINE if tensor_ref is None:NEWLINE raise ValueError(f"Mutable tensor {name} doesn't exist!")NEWLINE tensor = await tensor_ref.seal(timestamp)NEWLINE await mo.destroy_actor(tensor_ref)NEWLINE self._mutable_objects.pop(name)NEWLINE return tensorNEWLINENEWLINENEWLINEclass MutableTensorActor(mo.Actor):NEWLINE def __init__(NEWLINE self,NEWLINE session_id: str,NEWLINE name: str,NEWLINE workers: List[str],NEWLINE shape: Tuple,NEWLINE dtype: Union[np.dtype, str],NEWLINE default_value: Union[int, float] = 0,NEWLINE chunk_size: Union[int, Tuple] = None,NEWLINE ):NEWLINE self._session_id = session_idNEWLINE 
self._name = nameNEWLINE        self._workers = workersNEWLINE        self._shape = shapeNEWLINE        self._dtype = dtypeNEWLINE        self._default_value = default_valueNEWLINE        self._chunk_size = chunk_sizeNEWLINENEWLINE        self._sealed = FalseNEWLINENEWLINE        self._fetch = NoneNEWLINE        self._chunk_actors = []NEWLINE        # chunk to actor: {chunk index -> actor uid}NEWLINE        self._chunk_to_actor: Dict[NEWLINE            Tuple, Union[MutableTensorChunkActor, mo.ActorRef]NEWLINE        ] = dict()NEWLINENEWLINE    async def __post_create__(self):NEWLINE        self._meta_api = await MetaAPI.create(self._session_id, self.address)NEWLINENEWLINE        # tile a random tensor to generate keys, but don't actually executeNEWLINE        # the random generatorNEWLINE        from ....tensor.random import randNEWLINENEWLINE        self._fetch = build_fetch(NEWLINE            tile(rand(*self._shape, dtype=self._dtype, chunk_size=self._chunk_size))NEWLINE        )NEWLINENEWLINE        chunk_groups = np.array_split(self._fetch.chunks, len(self._workers))NEWLINE        for idx, (worker, chunks) in enumerate(zip(self._workers, chunk_groups)):NEWLINE            if len(chunks) == 0:NEWLINE                breakNEWLINE            chunk_actor_ref = await mo.create_actor(NEWLINE                MutableTensorChunkActor,NEWLINE                self._session_id,NEWLINE                self.address,NEWLINE                list(chunks),NEWLINE                dtype=self._dtype,NEWLINE                default_value=self._default_value,NEWLINE                address=worker,NEWLINE                uid=MutableTensorChunkActor.gen_uid(self._session_id, self._name, idx),NEWLINE            )NEWLINE            self._chunk_actors.append(chunk_actor_ref)NEWLINE            for chunk in chunks:NEWLINE                self._chunk_to_actor[chunk.index] = chunk_actor_refNEWLINENEWLINE    async def __pre_destroy__(self):NEWLINE        await asyncio.gather(*[mo.destroy_actor(ref) for ref in self._chunk_actors])NEWLINENEWLINE    @classmethodNEWLINE    def gen_uid(cls, session_id, name):NEWLINE        return f"mutable-tensor-{session_id}-{name}"NEWLINENEWLINE    async def info(self) -> "MutableTensorInfo":NEWLINE        return MutableTensorInfo(NEWLINE            self._shape, self._dtype, self._name, self._default_valueNEWLINE        )NEWLINENEWLINE    @mo.extensibleNEWLINE    async def _read_chunk(NEWLINE        self, chunk_actor_ref, chunk_index, records, chunk_value_shape, timestampNEWLINE    ):NEWLINE        return await chunk_actor_ref.read(NEWLINE            chunk_index, records, chunk_value_shape, timestampNEWLINE        )NEWLINENEWLINE    async def read(self, index, timestamp=None):NEWLINE        """NEWLINE        Read value from mutable tensor.NEWLINENEWLINE        ParametersNEWLINE        ----------NEWLINE        index:NEWLINE            Index to read from the tensor.NEWLINENEWLINE        timestamp: optionalNEWLINE            Timestamp to read value that happened before then.NEWLINE        """NEWLINE        timestamp = normalize_timestamp(timestamp)NEWLINE        records, output_shape = getitem_to_records(self._fetch, index)NEWLINENEWLINE        read_tasks, chunk_indices = [], []NEWLINE        # avoid rebinding `records` while iterating over itNEWLINE        for chunk_index, (chunk_records, chunk_value_shape, indices) in records.items():NEWLINE            chunk_actor_ref = self._chunk_to_actor[chunk_index]NEWLINE            read_tasks.append(NEWLINE                self._read_chunk.delay(NEWLINE                    chunk_actor_ref, chunk_index, chunk_records, chunk_value_shape, timestampNEWLINE                )NEWLINE            )NEWLINE            chunk_indices.append(indices)NEWLINE        chunks = await self._read_chunk.batch(*read_tasks)NEWLINE        result = np.full(output_shape, fill_value=self._default_value)NEWLINE        for chunk, indices in zip(chunks, chunk_indices):NEWLINE            result[indices] = chunkNEWLINE        return resultNEWLINENEWLINE
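    # Illustrative call pattern (hypothetical `tensor_ref` handle; in practiceNEWLINE    # `read`/`write` are invoked through an actor ref to this class):NEWLINE    #NEWLINE    #   await tensor_ref.write((slice(0, 4), slice(0, 4)), 1.0)NEWLINE    #   block = await tensor_ref.read((slice(0, 4), slice(0, 4)))NEWLINENEWLINE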
    @mo.extensibleNEWLINE    async def _write_chunk(self, chunk_actor_ref, chunk_index, records):NEWLINE        await chunk_actor_ref.write(chunk_index, records)NEWLINENEWLINE    async def write(self, index, value, timestamp=None):NEWLINE        """NEWLINE        Write value to mutable tensor.NEWLINENEWLINE        ParametersNEWLINE        ----------NEWLINE        index:NEWLINE            Index to write to the tensor.NEWLINENEWLINE        value:NEWLINE            The value that will be filled into the mutable tensor according to `index`.NEWLINENEWLINE        timestamp: optionalNEWLINE            Timestamp to associate with the newly written value.NEWLINE        """NEWLINE        timestamp = normalize_timestamp(timestamp)NEWLINE        records = setitem_to_records(self._fetch, index, value, timestamp)NEWLINENEWLINE        write_tasks = []NEWLINE        # avoid rebinding `records` while iterating over itNEWLINE        for chunk_index, chunk_records in records.items():NEWLINE            chunk_actor_ref = self._chunk_to_actor[chunk_index]NEWLINE            write_tasks.append(NEWLINE                self._write_chunk.delay(chunk_actor_ref, chunk_index, chunk_records)NEWLINE            )NEWLINE        await self._write_chunk.batch(*write_tasks)NEWLINENEWLINE    @mo.extensibleNEWLINE    async def _seal_chunk(self, chunk_actor_ref, timestamp):NEWLINE        await chunk_actor_ref.seal(timestamp)NEWLINENEWLINE    async def seal(self, timestamp=None):NEWLINE        if self._sealed:NEWLINE            return self._fetchNEWLINENEWLINE        timestamp = normalize_timestamp(timestamp)NEWLINE        self._sealed = TrueNEWLINE        seal_tasks = []NEWLINE        for chunk_actor_ref in self._chunk_actors:NEWLINE            seal_tasks.append(self._seal_chunk.delay(chunk_actor_ref, timestamp))NEWLINE        await self._seal_chunk.batch(*seal_tasks)NEWLINE        self._chunk_actors = []NEWLINE        return self._fetchNEWLINE |
# -*- coding: utf-8 -*-NEWLINE"""NEWLINENEWLINEScript Name: ProFile.pyNEWLINEAuthor: Do Trinh/Jimmy - 3D artist.NEWLINENEWLINEDescription:NEWLINENEWLINE"""NEWLINE# -------------------------------------------------------------------------------------------------------------NEWLINENEWLINEfrom pyPLM.Widgets import GroupGrid, LineEdit, Button, LabelNEWLINENEWLINEclass Profile(GroupGrid):NEWLINENEWLINE key = 'ProFile'NEWLINENEWLINE def __init__(self, parent=None):NEWLINE super(Profile, self).__init__(parent=parent)NEWLINENEWLINE self.parent = parentNEWLINENEWLINE self.layout.addWidget(Label({'txt': 'First Name'}), 0, 0, 1, 2)NEWLINE self.layout.addWidget(Label({'txt': 'Last Name'}), 1, 0, 1, 2)NEWLINE self.layout.addWidget(Label({'txt': 'Your Title'}), 2, 0, 1, 2)NEWLINE self.layout.addWidget(Label({'txt': 'Email'}), 3, 0, 1, 2)NEWLINE self.layout.addWidget(Label({'txt': 'Phone Number'}), 4, 0, 1, 2)NEWLINENEWLINE self.firstnameField = LineEdit()NEWLINE self.lastnameField = LineEdit()NEWLINE self.titleField = LineEdit()NEWLINE self.emailField = LineEdit()NEWLINE self.phoneField = LineEdit()NEWLINENEWLINE self.changeBtn = Button({'txt': "Update Profile", 'cl': self.update_profile})NEWLINENEWLINE self.layout.addWidget(self.firstnameField, 0, 2, 1, 4)NEWLINE self.layout.addWidget(self.lastnameField, 1, 2, 1, 4)NEWLINE self.layout.addWidget(self.titleField, 2, 2, 1, 4)NEWLINE self.layout.addWidget(self.emailField, 3, 2, 1, 4)NEWLINE self.layout.addWidget(self.phoneField, 4, 2, 1, 4)NEWLINE self.layout.addWidget(self.changeBtn, 5, 0, 1, 6)NEWLINENEWLINE def update_profile(self):NEWLINE passNEWLINENEWLINE# -------------------------------------------------------------------------------------------------------------NEWLINE# Created by panda on 28/11/2019 - 7:49 PMNEWLINE# © 2017 - 2018 DAMGteam. All rights reserved |
from typing import List


class InvoiceProfiles:
    def __init__(self, invoice_profiles: List[str]):
        self.invoice_profiles = invoice_profiles

    def as_list(self) -> List[dict]:
        return [{"value": invoice_profile} for invoice_profile in self.invoice_profiles]
import loggingNEWLINEimport mathNEWLINEimport osNEWLINEimport pickleNEWLINEimport sysNEWLINEimport timeNEWLINENEWLINEimport psutilNEWLINENEWLINEfrom .catboost_utils import construct_custom_catboost_metricNEWLINEfrom .hyperparameters.parameters import get_param_baselineNEWLINEfrom .hyperparameters.searchspaces import get_default_searchspaceNEWLINEfrom ..abstract.abstract_model import AbstractModelNEWLINEfrom ...constants import PROBLEM_TYPES_CLASSIFICATION, MULTICLASSNEWLINEfrom ....utils.exceptions import NotEnoughMemoryError, TimeLimitExceededNEWLINEfrom .....try_import import try_import_catboostNEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINENEWLINE# TODO: Catboost crashes on multiclass problems where only two classes have significant member count.NEWLINE# Question: Do we turn these into binary classification and then convert to multiclass output in Learner? This would make the most sense.NEWLINE# TODO: Consider having Catboost variant that converts all categoricals to numerical as done in RFModel, was showing improved results in some problems.NEWLINEclass CatboostModel(AbstractModel):NEWLINE def __init__(self, path: str, name: str, problem_type: str, objective_func, stopping_metric=None, num_classes=None, hyperparameters=None, features=None, debug=0, **kwargs):NEWLINE super().__init__(path=path, name=name, problem_type=problem_type, objective_func=objective_func, stopping_metric=stopping_metric, num_classes=num_classes, hyperparameters=hyperparameters, features=features, debug=debug, **kwargs)NEWLINE try_import_catboost()NEWLINE from catboost import CatBoostClassifier, CatBoostRegressorNEWLINE self.model_type = CatBoostClassifier if problem_type in PROBLEM_TYPES_CLASSIFICATION else CatBoostRegressorNEWLINE if isinstance(self.params['eval_metric'], str):NEWLINE self.metric_name = self.params['eval_metric']NEWLINE else:NEWLINE self.metric_name = type(self.params['eval_metric']).__name__NEWLINENEWLINE def _set_default_params(self):NEWLINE default_params = get_param_baseline(problem_type=self.problem_type)NEWLINE for param, val in default_params.items():NEWLINE self._set_default_param_value(param, val)NEWLINE self._set_default_param_value('random_seed', 0) # Remove randomness for reproducibilityNEWLINE self._set_default_param_value('eval_metric', construct_custom_catboost_metric(self.stopping_metric, True, not self.stopping_metric_needs_y_pred, self.problem_type))NEWLINE # Set 'allow_writing_files' to True in order to keep log files created by catboost during training (these will be saved in the directory where AutoGluon stores this model)NEWLINE self._set_default_param_value('allow_writing_files', False) # Disables creation of catboost logging files during training by defaultNEWLINENEWLINE def _get_default_searchspace(self):NEWLINE return get_default_searchspace(self.problem_type, num_classes=self.num_classes)NEWLINENEWLINE def preprocess(self, X):NEWLINE X = super().preprocess(X)NEWLINE categoricals = list(X.select_dtypes(include='category').columns)NEWLINE if categoricals:NEWLINE X = X.copy()NEWLINE for category in categoricals:NEWLINE current_categories = X[category].cat.categoriesNEWLINE if '__NaN__' in current_categories:NEWLINE X[category] = X[category].fillna('__NaN__')NEWLINE else:NEWLINE X[category] = X[category].cat.add_categories('__NaN__').fillna('__NaN__')NEWLINE return XNEWLINENEWLINE # TODO: Use Pool in preprocess, optimize bagging to do Pool.split() to avoid re-computing pool for each fold! 
Requires stateful + yNEWLINE # Pool is much more memory efficient, avoids copying data twice in memoryNEWLINE def fit(self, X_train, Y_train, X_test=None, Y_test=None, time_limit=None, **kwargs):NEWLINE from catboost import PoolNEWLINE num_rows_train = len(X_train)NEWLINE num_cols_train = len(X_train.columns)NEWLINE if self.problem_type == MULTICLASS:NEWLINE if self.num_classes is not None:NEWLINE num_classes = self.num_classesNEWLINE else:NEWLINE num_classes = 10 # Guess if not given, can do better by looking at y_trainNEWLINE else:NEWLINE num_classes = 1NEWLINENEWLINE # TODO: Add ignore_memory_limits param to disable NotEnoughMemoryError ExceptionsNEWLINE max_memory_usage_ratio = self.params_aux['max_memory_usage_ratio']NEWLINE approx_mem_size_req = num_rows_train * num_cols_train * num_classes / 2 # TODO: Extremely crude approximation, can be vastly improvedNEWLINE if approx_mem_size_req > 1e9: # > 1 GBNEWLINE available_mem = psutil.virtual_memory().availableNEWLINE ratio = approx_mem_size_req / available_memNEWLINE if ratio > (1 * max_memory_usage_ratio):NEWLINE logger.warning('\tWarning: Not enough memory to safely train CatBoost model, roughly requires: %s GB, but only %s GB is available...' % (round(approx_mem_size_req / 1e9, 3), round(available_mem / 1e9, 3)))NEWLINE raise NotEnoughMemoryErrorNEWLINE elif ratio > (0.2 * max_memory_usage_ratio):NEWLINE logger.warning('\tWarning: Potentially not enough memory to safely train CatBoost model, roughly requires: %s GB, but only %s GB is available...' % (round(approx_mem_size_req / 1e9, 3), round(available_mem / 1e9, 3)))NEWLINENEWLINE start_time = time.time()NEWLINE X_train = self.preprocess(X_train)NEWLINE cat_features = list(X_train.select_dtypes(include='category').columns)NEWLINE X_train = Pool(data=X_train, label=Y_train, cat_features=cat_features)NEWLINENEWLINE if X_test is not None:NEWLINE X_test = self.preprocess(X_test)NEWLINE X_test = Pool(data=X_test, label=Y_test, cat_features=cat_features)NEWLINE eval_set = X_testNEWLINE if num_rows_train <= 10000:NEWLINE modifier = 1NEWLINE else:NEWLINE modifier = 10000/num_rows_trainNEWLINE early_stopping_rounds = max(round(modifier*150), 10)NEWLINE num_sample_iter_max = max(round(modifier*50), 2)NEWLINE else:NEWLINE eval_set = NoneNEWLINE early_stopping_rounds = NoneNEWLINE num_sample_iter_max = 50NEWLINENEWLINE invalid_params = ['num_threads', 'num_gpus']NEWLINE for invalid in invalid_params:NEWLINE if invalid in self.params:NEWLINE self.params.pop(invalid)NEWLINE train_dir = NoneNEWLINE if 'allow_writing_files' in self.params and self.params['allow_writing_files']:NEWLINE if 'train_dir' not in self.params:NEWLINE try:NEWLINE # TODO: What if path is in S3?NEWLINE os.makedirs(os.path.dirname(self.path), exist_ok=True)NEWLINE except:NEWLINE passNEWLINE else:NEWLINE train_dir = self.path + 'catboost_info'NEWLINE logger.log(15, f'\tCatboost model hyperparameters: {self.params}')NEWLINENEWLINE # TODO: Add more control over these params (specifically early_stopping_rounds)NEWLINE verbosity = kwargs.get('verbosity', 2)NEWLINE if verbosity <= 1:NEWLINE verbose = FalseNEWLINE elif verbosity == 2:NEWLINE verbose = FalseNEWLINE elif verbosity == 3:NEWLINE verbose = 20NEWLINE else:NEWLINE verbose = TrueNEWLINENEWLINE init_model = NoneNEWLINE init_model_tree_count = NoneNEWLINE init_model_best_iteration = NoneNEWLINE init_model_best_score = NoneNEWLINENEWLINE params = self.params.copy()NEWLINE num_features = len(self.features)NEWLINE if self.problem_type == MULTICLASS and 'rsm' not in params and 
'colsample_bylevel' not in params and num_features > 1000:NEWLINE if time_limit:NEWLINE # Reduce sample iterations to avoid taking unreasonable amounts of timeNEWLINE num_sample_iter_max = max(round(num_sample_iter_max/2), 2)NEWLINE # Subsample columns to speed up trainingNEWLINE params['colsample_bylevel'] = max(min(1.0, 1000 / num_features), 0.05)NEWLINE logger.log(30, f'\tMany features detected ({num_features}), dynamically setting \'colsample_bylevel\' to {params["colsample_bylevel"]} to speed up training (Default = 1).')NEWLINE logger.log(30, f'\tTo disable this functionality, explicitly specify \'colsample_bylevel\' in the model hyperparameters.')NEWLINENEWLINE if time_limit:NEWLINE time_left_start = time_limit - (time.time() - start_time)NEWLINE if time_left_start <= time_limit * 0.4: # if 60% of time was spent preprocessing, likely not enough time to train modelNEWLINE raise TimeLimitExceededNEWLINE params_init = params.copy()NEWLINE num_sample_iter = min(num_sample_iter_max, params_init['iterations'])NEWLINE params_init['iterations'] = num_sample_iterNEWLINE if train_dir is not None:NEWLINE params_init['train_dir'] = train_dirNEWLINE self.model = self.model_type(NEWLINE **params_init,NEWLINE )NEWLINE self.model.fit(NEWLINE X_train,NEWLINE eval_set=eval_set,NEWLINE use_best_model=True,NEWLINE verbose=verbose,NEWLINE # early_stopping_rounds=early_stopping_rounds,NEWLINE )NEWLINENEWLINE init_model_tree_count = self.model.tree_count_NEWLINE init_model_best_iteration = self.model.get_best_iteration()NEWLINE init_model_best_score = self.model.get_best_score()['validation'][self.metric_name]NEWLINENEWLINE time_left_end = time_limit - (time.time() - start_time)NEWLINE time_taken_per_iter = (time_left_start - time_left_end) / num_sample_iterNEWLINE estimated_iters_in_time = round(time_left_end / time_taken_per_iter)NEWLINE init_model = self.modelNEWLINENEWLINE params_final = params.copy()NEWLINENEWLINE # TODO: This only handles memory with time_limits specified, but not with time_limits=None, handle when time_limits=NoneNEWLINE available_mem = psutil.virtual_memory().availableNEWLINE model_size_bytes = sys.getsizeof(pickle.dumps(self.model))NEWLINENEWLINE max_memory_proportion = 0.3 * max_memory_usage_ratioNEWLINE mem_usage_per_iter = model_size_bytes / num_sample_iterNEWLINE max_memory_iters = math.floor(available_mem * max_memory_proportion / mem_usage_per_iter)NEWLINENEWLINE params_final['iterations'] = min(params['iterations'] - num_sample_iter, estimated_iters_in_time)NEWLINE if params_final['iterations'] > max_memory_iters - num_sample_iter:NEWLINE if max_memory_iters - num_sample_iter <= 500:NEWLINE logger.warning('\tWarning: CatBoost will be early stopped due to lack of memory, increase memory to enable full quality models, max training iterations changed to %s from %s' % (max_memory_iters - num_sample_iter, params_final['iterations']))NEWLINE params_final['iterations'] = max_memory_iters - num_sample_iterNEWLINE else:NEWLINE params_final = params.copy()NEWLINENEWLINE if train_dir is not None:NEWLINE params_final['train_dir'] = train_dirNEWLINE if params_final['iterations'] > 0:NEWLINE self.model = self.model_type(NEWLINE **params_final,NEWLINE )NEWLINENEWLINE # TODO: Strangely, this performs different if clone init_model is sent in than if trained for same total number of iterations. 
May be able to optimize catboost models further with thisNEWLINE self.model.fit(NEWLINE X_train,NEWLINE eval_set=eval_set,NEWLINE verbose=verbose,NEWLINE early_stopping_rounds=early_stopping_rounds,NEWLINE # use_best_model=True,NEWLINE init_model=init_model,NEWLINE )NEWLINENEWLINE if init_model is not None:NEWLINE final_model_best_score = self.model.get_best_score()['validation'][self.metric_name]NEWLINE if self.stopping_metric._optimum > final_model_best_score:NEWLINE if final_model_best_score > init_model_best_score:NEWLINE best_iteration = init_model_tree_count + self.model.get_best_iteration()NEWLINE else:NEWLINE best_iteration = init_model_best_iterationNEWLINE else:NEWLINE if final_model_best_score < init_model_best_score:NEWLINE best_iteration = init_model_tree_count + self.model.get_best_iteration()NEWLINE else:NEWLINE best_iteration = init_model_best_iterationNEWLINENEWLINE self.model.shrink(ntree_start=0, ntree_end=best_iteration+1)NEWLINENEWLINE self.params_trained['iterations'] = self.model.tree_count_NEWLINENEWLINE def get_model_feature_importance(self):NEWLINE importance_df = self.model.get_feature_importance(prettified=True)NEWLINE importance_df['Importances'] = importance_df['Importances'] / 100NEWLINE importance_series = importance_df.set_index('Feature Id')['Importances']NEWLINE importance_dict = importance_series.to_dict()NEWLINE return importance_dictNEWLINE |
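The memory cap applied in fit() above can be read as a small pure function. This is a paraphrase of the arithmetic in the code (the 0.3 proportion and the per-iteration cost estimate come straight from it), not an AutoGluon API:

import math

def max_iters_for_memory(model_size_bytes: float, num_sample_iter: int,
                         available_mem: float, max_memory_usage_ratio: float = 1.0) -> int:
    """Cap boosting iterations so the final model stays under ~30% of free RAM,
    assuming model size grows roughly linearly with iteration count."""
    max_memory_proportion = 0.3 * max_memory_usage_ratio
    mem_usage_per_iter = model_size_bytes / num_sample_iter
    return math.floor(available_mem * max_memory_proportion / mem_usage_per_iter)

# A 50 MB model after 50 warm-up iterations with 8 GB free allows ~2400 iterations:
print(max_iters_for_memory(50e6, 50, 8e9))  # -> 2400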
from django.contrib import admin
from .models import Item

# Register your models here.
admin.site.register(Item)
"""Labware state store tests."""NEWLINEimport pytestNEWLINEfrom collections import OrderedDictNEWLINEfrom contextlib import nullcontext as does_not_raiseNEWLINEfrom typing import List, NamedTuple, Optional, Sequence, Tuple, Type, UnionNEWLINENEWLINEfrom opentrons.protocol_engine import EngineStatus, commands as cmd, errorsNEWLINEfrom opentrons.protocol_engine.state.commands import CommandState, CommandViewNEWLINEfrom opentrons.protocol_engine.actions import PlayAction, PauseActionNEWLINENEWLINEfrom .command_fixtures import (NEWLINE create_pending_command,NEWLINE create_running_command,NEWLINE create_failed_command,NEWLINE create_completed_command,NEWLINE)NEWLINENEWLINENEWLINEdef get_command_view(NEWLINE is_running: bool = False,NEWLINE stop_requested: bool = False,NEWLINE commands_by_id: Sequence[Tuple[str, cmd.Command]] = (),NEWLINE) -> CommandView:NEWLINE """Get a command view test subject."""NEWLINE state = CommandState(NEWLINE is_running=is_running,NEWLINE stop_requested=stop_requested,NEWLINE commands_by_id=OrderedDict(commands_by_id),NEWLINE )NEWLINENEWLINE return CommandView(state=state)NEWLINENEWLINENEWLINEdef test_get_by_id() -> None:NEWLINE """It should get a command by ID from state."""NEWLINE command = create_completed_command(command_id="command-id")NEWLINE subject = get_command_view(commands_by_id=[("command-id", command)])NEWLINENEWLINE assert subject.get("command-id") == commandNEWLINENEWLINENEWLINEdef test_get_command_bad_id() -> None:NEWLINE """It should raise if a requested command ID isn't in state."""NEWLINE command = create_completed_command(command_id="command-id")NEWLINE subject = get_command_view(commands_by_id=[("command-id", command)])NEWLINENEWLINE with pytest.raises(errors.CommandDoesNotExistError):NEWLINE subject.get("asdfghjkl")NEWLINENEWLINENEWLINEdef test_get_all() -> None:NEWLINE """It should get all the commands from the state."""NEWLINE command_1 = create_completed_command(command_id="command-id-1")NEWLINE command_2 = create_running_command(command_id="command-id-2")NEWLINE command_3 = create_pending_command(command_id="command-id-3")NEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-1", command_1),NEWLINE ("command-id-2", command_2),NEWLINE ("command-id-3", command_3),NEWLINE ]NEWLINE )NEWLINENEWLINE assert subject.get_all() == [command_1, command_2, command_3]NEWLINENEWLINENEWLINEdef test_get_next_queued_returns_first_pending() -> None:NEWLINE """It should return the first command that's pending."""NEWLINE pending_command = create_pending_command()NEWLINE running_command = create_running_command()NEWLINE completed_command = create_completed_command()NEWLINENEWLINE subject = get_command_view(NEWLINE is_running=True,NEWLINE commands_by_id=[NEWLINE ("command-id-1", running_command),NEWLINE ("command-id-2", completed_command),NEWLINE ("command-id-3", pending_command),NEWLINE ("command-id-4", pending_command),NEWLINE ],NEWLINE )NEWLINENEWLINE assert subject.get_next_queued() == "command-id-3"NEWLINENEWLINENEWLINEdef test_get_next_queued_returns_none_when_no_pending() -> None:NEWLINE """It should return None if there are no pending commands to return."""NEWLINE running_command = create_running_command(command_id="command-id-1")NEWLINE completed_command = create_completed_command(command_id="command-id-2")NEWLINENEWLINE subject = get_command_view(is_running=True)NEWLINENEWLINE assert subject.get_next_queued() is NoneNEWLINENEWLINE subject = get_command_view(NEWLINE is_running=True,NEWLINE commands_by_id=[NEWLINE 
("command-id-1", running_command),NEWLINE ("command-id-2", completed_command),NEWLINE ],NEWLINE )NEWLINENEWLINE assert subject.get_next_queued() is NoneNEWLINENEWLINENEWLINEdef test_get_next_queued_returns_none_if_not_running() -> None:NEWLINE """It should return None if the engine is not running."""NEWLINE pending_command = create_pending_command()NEWLINENEWLINE subject = get_command_view(NEWLINE is_running=False,NEWLINE commands_by_id=[("command-id", pending_command)],NEWLINE )NEWLINE result = subject.get_next_queued()NEWLINENEWLINE assert result is NoneNEWLINENEWLINENEWLINEdef test_get_next_queued_raises_when_earlier_command_failed() -> None:NEWLINE """It should raise if any prior-added command is failed."""NEWLINE running_command = create_running_command(command_id="command-id-1")NEWLINE completed_command = create_completed_command(command_id="command-id-2")NEWLINE failed_command = create_failed_command(command_id="command-id-3")NEWLINE pending_command = create_pending_command(command_id="command-id-4")NEWLINENEWLINE subject = get_command_view(NEWLINE is_running=True,NEWLINE commands_by_id=[NEWLINE ("command-id-1", running_command),NEWLINE ("command-id-2", completed_command),NEWLINE ("command-id-3", failed_command),NEWLINE ("command-id-4", pending_command),NEWLINE ],NEWLINE )NEWLINENEWLINE with pytest.raises(errors.ProtocolEngineStoppedError):NEWLINE subject.get_next_queued()NEWLINENEWLINENEWLINEdef test_get_next_queued_raises_if_stopped() -> None:NEWLINE """It should raise if an engine stop has been requested."""NEWLINE subject = get_command_view(stop_requested=True)NEWLINENEWLINE with pytest.raises(errors.ProtocolEngineStoppedError):NEWLINE subject.get_next_queued()NEWLINENEWLINENEWLINEdef test_get_is_running() -> None:NEWLINE """It should be able to get if the engine is running."""NEWLINE subject = get_command_view(is_running=False)NEWLINE assert subject.get_is_running() is FalseNEWLINENEWLINE subject = get_command_view(is_running=True)NEWLINE assert subject.get_is_running() is TrueNEWLINENEWLINENEWLINEdef test_get_is_complete() -> None:NEWLINE """It should be able to tell if a command is complete."""NEWLINE completed_command = create_completed_command(command_id="command-id-1")NEWLINE running_command = create_running_command(command_id="command-id-2")NEWLINE pending_command = create_pending_command(command_id="command-id-3")NEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-1", completed_command),NEWLINE ("command-id-2", running_command),NEWLINE ("command-id-3", pending_command),NEWLINE ]NEWLINE )NEWLINENEWLINE assert subject.get_is_complete("command-id-1") is TrueNEWLINE assert subject.get_is_complete("command-id-2") is FalseNEWLINE assert subject.get_is_complete("command-id-3") is FalseNEWLINENEWLINENEWLINEdef test_get_is_complete_with_failed_command() -> None:NEWLINE """It should return true if a given command will never be executed."""NEWLINE failed_command = create_failed_command(command_id="command-id-1")NEWLINE pending_command = create_pending_command(command_id="command-id-2")NEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-1", failed_command),NEWLINE ("command-id-2", pending_command),NEWLINE ]NEWLINE )NEWLINENEWLINE assert subject.get_is_complete("command-id-1") is TrueNEWLINE assert subject.get_is_complete("command-id-2") is TrueNEWLINENEWLINENEWLINEdef test_get_all_complete() -> None:NEWLINE """It should return true if all commands completed or any failed."""NEWLINE completed_command = 
create_completed_command(command_id="command-id-1")NEWLINE running_command = create_running_command(command_id="command-id-2")NEWLINE pending_command = create_pending_command(command_id="command-id-3")NEWLINE failed_command = create_failed_command(command_id="command-id-4")NEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-4", failed_command),NEWLINE ("command-id-3", pending_command),NEWLINE ],NEWLINE )NEWLINENEWLINE assert subject.get_all_complete() is TrueNEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-1", completed_command),NEWLINE ("command-id-2", running_command),NEWLINE ("command-id-3", pending_command),NEWLINE ],NEWLINE )NEWLINENEWLINE assert subject.get_all_complete() is FalseNEWLINENEWLINE subject = get_command_view(NEWLINE commands_by_id=[NEWLINE ("command-id-1", completed_command),NEWLINE ("command-id-2", completed_command),NEWLINE ],NEWLINE )NEWLINENEWLINE assert subject.get_all_complete() is TrueNEWLINENEWLINENEWLINEdef test_get_stop_requested() -> None:NEWLINE """It should return true if the stop_requested flag is set."""NEWLINE subject = get_command_view(stop_requested=True)NEWLINE assert subject.get_stop_requested() is TrueNEWLINENEWLINE subject = get_command_view(stop_requested=False)NEWLINE assert subject.get_stop_requested() is FalseNEWLINENEWLINENEWLINEdef test_get_is_stopped() -> None:NEWLINE """It should return true if stop requested and no command running."""NEWLINE completed_command = create_completed_command(command_id="command-id-1")NEWLINE running_command = create_running_command(command_id="command-id-2")NEWLINE pending_command = create_pending_command(command_id="command-id-3")NEWLINE failed_command = create_failed_command(command_id="command-id-4")NEWLINENEWLINE subject = get_command_view(NEWLINE stop_requested=False,NEWLINE commands_by_id=(),NEWLINE )NEWLINE assert subject.get_is_stopped() is FalseNEWLINENEWLINE subject = get_command_view(NEWLINE stop_requested=True,NEWLINE commands_by_id=[("command-id-2", running_command)],NEWLINE )NEWLINE assert subject.get_is_stopped() is FalseNEWLINENEWLINE subject = get_command_view(NEWLINE stop_requested=True,NEWLINE commands_by_id=[NEWLINE ("command-id-1", completed_command),NEWLINE ("command-id-3", pending_command),NEWLINE ("command-id-4", failed_command),NEWLINE ],NEWLINE )NEWLINE assert subject.get_is_stopped() is TrueNEWLINENEWLINENEWLINEclass ActionAllowedSpec(NamedTuple):NEWLINE """Spec data to test CommandView.validate_action_allowed."""NEWLINENEWLINE subject: CommandViewNEWLINE action: Union[PlayAction, PauseAction]NEWLINE expected_error: Optional[Type[errors.ProtocolEngineError]]NEWLINENEWLINENEWLINEaction_allowed_specs: List[ActionAllowedSpec] = [NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=False, is_running=False),NEWLINE action=PlayAction(),NEWLINE expected_error=None,NEWLINE ),NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=False, is_running=True),NEWLINE action=PlayAction(),NEWLINE expected_error=None,NEWLINE ),NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=True, is_running=False),NEWLINE action=PlayAction(),NEWLINE expected_error=errors.ProtocolEngineStoppedError,NEWLINE ),NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=False, is_running=False),NEWLINE action=PauseAction(),NEWLINE expected_error=None,NEWLINE ),NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=False, is_running=True),NEWLINE 
action=PauseAction(),NEWLINE expected_error=None,NEWLINE ),NEWLINE ActionAllowedSpec(NEWLINE subject=get_command_view(stop_requested=True, is_running=False),NEWLINE action=PauseAction(),NEWLINE expected_error=errors.ProtocolEngineStoppedError,NEWLINE ),NEWLINE][email protected](ActionAllowedSpec._fields, action_allowed_specs)NEWLINEdef test_validate_action_allowed(NEWLINE subject: CommandView,NEWLINE action: Union[PlayAction, PauseAction],NEWLINE expected_error: Optional[Type[errors.ProtocolEngineError]],NEWLINE) -> None:NEWLINE """It should validate allowed play/pause actions."""NEWLINE expectation = pytest.raises(expected_error) if expected_error else does_not_raise()NEWLINENEWLINE with expectation: # type: ignore[attr-defined]NEWLINE subject.validate_action_allowed(action)NEWLINENEWLINENEWLINEclass GetStatusSpec(NamedTuple):NEWLINE """Spec data for get_status tests."""NEWLINENEWLINE subject: CommandViewNEWLINE expected_status: EngineStatusNEWLINENEWLINENEWLINEget_status_specs: List[GetStatusSpec] = [NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=False,NEWLINE commands_by_id=[],NEWLINE ),NEWLINE expected_status=EngineStatus.READY_TO_RUN,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=False,NEWLINE commands_by_id=[("command-id", create_pending_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.READY_TO_RUN,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=False,NEWLINE commands_by_id=[("command-id", create_running_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.PAUSE_REQUESTED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=False,NEWLINE commands_by_id=[NEWLINE ("command-id-1", create_completed_command()),NEWLINE ("command-id-2", create_pending_command()),NEWLINE ],NEWLINE ),NEWLINE expected_status=EngineStatus.PAUSED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=True,NEWLINE stop_requested=False,NEWLINE commands_by_id=[],NEWLINE ),NEWLINE expected_status=EngineStatus.RUNNING,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=True,NEWLINE stop_requested=False,NEWLINE commands_by_id=[("command-id", create_failed_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.FAILED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=False,NEWLINE commands_by_id=[("command-id", create_failed_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.FAILED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=True,NEWLINE commands_by_id=[("command-id", create_failed_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.STOPPED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=True,NEWLINE commands_by_id=[],NEWLINE ),NEWLINE expected_status=EngineStatus.SUCCEEDED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=True,NEWLINE commands_by_id=[("command-id", create_completed_command())],NEWLINE ),NEWLINE expected_status=EngineStatus.SUCCEEDED,NEWLINE ),NEWLINE GetStatusSpec(NEWLINE subject=get_command_view(NEWLINE is_running=False,NEWLINE stop_requested=True,NEWLINE commands_by_id=[("command-id", 
create_running_command())],
        ),
        expected_status=EngineStatus.STOP_REQUESTED,
    ),
    GetStatusSpec(
        subject=get_command_view(
            is_running=False,
            stop_requested=True,
            commands_by_id=[
                ("command-id-1", create_completed_command()),
                ("command-id-2", create_pending_command()),
            ],
        ),
        expected_status=EngineStatus.STOPPED,
    ),
]


@pytest.mark.parametrize(GetStatusSpec._fields, get_status_specs)
def test_get_status(subject: CommandView, expected_status: EngineStatus) -> None:
    """It should set a status according to the command queue and running flag.

    1. Not running, no stop requested, only queued commands: READY_TO_RUN
    2. Running, no stop requested, no failed commands: RUNNING
    3. Not running, no stop requested, command still running: PAUSE_REQUESTED
    4. Not running, no stop requested, no running commands: PAUSED
    5. Stop requested, command still running: STOP_REQUESTED
    6. Stop requested, no running commands, with queued commands: STOPPED
    7. Stop requested, all commands succeeded: SUCCEEDED
    8. No stop requested, any failed commands: FAILED
    """
    assert subject.get_status() == expected_status
import ipaddressNEWLINEimport structNEWLINEimport pytestNEWLINENEWLINEfrom mitmproxy import dnsNEWLINEfrom mitmproxy import flowfilterNEWLINEfrom mitmproxy.test import tflowNEWLINEfrom mitmproxy.test import tutilsNEWLINENEWLINENEWLINEclass TestResourceRecord:NEWLINENEWLINE def test_str(self):NEWLINE assert str(dns.ResourceRecord.A("test", ipaddress.IPv4Address("1.2.3.4"))) == "1.2.3.4"NEWLINE assert str(dns.ResourceRecord.AAAA("test", ipaddress.IPv6Address("::1"))) == "::1"NEWLINE assert str(dns.ResourceRecord.CNAME("test", "some.other.host")) == "some.other.host"NEWLINE assert str(dns.ResourceRecord.PTR("test", "some.other.host")) == "some.other.host"NEWLINE assert str(dns.ResourceRecord.TXT("test", "unicode text 😀")) == "unicode text 😀"NEWLINE assert str(dns.ResourceRecord("test", dns.types.A, dns.classes.IN, dns.ResourceRecord.DEFAULT_TTL, b'')) == "0x (invalid A data)"NEWLINE assert str(NEWLINE dns.ResourceRecord("test", dns.types.SOA, dns.classes.IN, dns.ResourceRecord.DEFAULT_TTL, b'\x00\x01\x02\x03')NEWLINE ) == "0x00010203"NEWLINENEWLINE def test_setter(self):NEWLINE rr = dns.ResourceRecord("test", dns.types.ANY, dns.classes.IN, dns.ResourceRecord.DEFAULT_TTL, b'')NEWLINE rr.ipv4_address = ipaddress.IPv4Address("8.8.4.4")NEWLINE assert rr.ipv4_address == ipaddress.IPv4Address("8.8.4.4")NEWLINE rr.ipv6_address = ipaddress.IPv6Address("2001:4860:4860::8844")NEWLINE assert rr.ipv6_address == ipaddress.IPv6Address("2001:4860:4860::8844")NEWLINE rr.domain_name = "www.example.org"NEWLINE assert rr.domain_name == "www.example.org"NEWLINE rr.text = "sample text"NEWLINE assert rr.text == "sample text"NEWLINENEWLINENEWLINEclass TestMessage:NEWLINENEWLINE def test_json(self):NEWLINE resp = tutils.tdnsresp()NEWLINE json = resp.to_json()NEWLINE assert json["id"] == resp.idNEWLINE assert len(json["questions"]) == len(resp.questions)NEWLINE assert json["questions"][0]["name"] == resp.questions[0].nameNEWLINE assert len(json["answers"]) == len(resp.answers)NEWLINE assert json["answers"][0]["data"] == str(resp.answers[0])NEWLINENEWLINE def test_responses(self):NEWLINE req = tutils.tdnsreq()NEWLINE resp = tutils.tdnsresp()NEWLINE resp2 = req.succeed([NEWLINE dns.ResourceRecord.A("dns.google", ipaddress.IPv4Address("8.8.8.8"), ttl=32),NEWLINE dns.ResourceRecord.A("dns.google", ipaddress.IPv4Address("8.8.4.4"), ttl=32)NEWLINE ])NEWLINE resp2.timestamp = resp.timestampNEWLINE assert resp == resp2NEWLINE assert resp2.size == 8NEWLINE with pytest.raises(ValueError):NEWLINE req.fail(dns.response_codes.NOERROR)NEWLINE assert req.fail(dns.response_codes.FORMERR).response_code == dns.response_codes.FORMERRNEWLINENEWLINE def test_range(self):NEWLINE def test(what: str, min: int, max: int):NEWLINE req = tutils.tdnsreq()NEWLINE setattr(req, what, min)NEWLINE assert getattr(dns.Message.unpack(req.packed), what) == minNEWLINE setattr(req, what, min - 1)NEWLINE with pytest.raises(ValueError):NEWLINE req.packedNEWLINE setattr(req, what, max)NEWLINE assert getattr(dns.Message.unpack(req.packed), what) == maxNEWLINE setattr(req, what, max + 1)NEWLINE with pytest.raises(ValueError):NEWLINE req.packedNEWLINENEWLINE test("id", 0, 2 ** 16 - 1)NEWLINE test("reserved", 0, 7)NEWLINE test("op_code", 0, 0b1111)NEWLINE test("response_code", 0, 0b1111)NEWLINENEWLINE def test_packing(self):NEWLINE def assert_eq(m: dns.Message, b: bytes) -> None:NEWLINE m_b = dns.Message.unpack(b)NEWLINE m_b.timestamp = m.timestampNEWLINE assert m_b == mNEWLINE assert m_b.packed == m.packedNEWLINENEWLINE assert_eq(tutils.tdnsreq(), 
b'\x00\x2a\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x03dns\x06google\x00\x00\x01\x00\x01')NEWLINE with pytest.raises(struct.error):NEWLINE dns.Message.unpack(b'\x00\x2a\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x03dns\x06google\x00\x00\x01\x00\x01\x00')NEWLINE assert_eq(tutils.tdnsresp(), (NEWLINE b'\x00\x2a\x81\x80\x00\x01\x00\x02\x00\x00\x00\x00\x03dns\x06google\x00\x00\x01\x00\x01' +NEWLINE b'\xc0\x0c\x00\x01\x00\x01\x00\x00\x00 \x00\x04\x08\x08\x08\x08\xc0\x0c\x00\x01\x00\x01\x00\x00\x00 \x00\x04\x08\x08\x04\x04'NEWLINE ))NEWLINE with pytest.raises(struct.error): # question errorNEWLINE dns.Message.unpack(b'\x00\x2a\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x03dns\x06goo')NEWLINE with pytest.raises(struct.error): # rr length errorNEWLINE dns.Message.unpack(NEWLINE b'\x00\x2a\x81\x80\x00\x01\x00\x02\x00\x00\x00\x00\x03dns\x06google\x00\x00\x01\x00\x01' +NEWLINE b'\xc0\x0c\x00\x01\x00\x01\x00\x00\x00 \x00\x04\x08\x08\x08\x08\xc0\x0c\x00\x01\x00\x01\x00\x00\x00 \x00\x04\x08\x08\x04'NEWLINE )NEWLINE txt = dns.Message.unpack(NEWLINE b'V\x1a\x81\x80\x00\x01\x00\x01\x00\x01\x00\x01\x05alive\x06github\x03com\x00\x00' +NEWLINE b'\x10\x00\x01\xc0\x0c\x00\x05\x00\x01\x00\x00\x0b\xc6\x00\x07\x04live\xc0\x12\xc0\x12\x00\x06\x00\x01' +NEWLINE b'\x00\x00\x03\x84\x00H\x07ns-1707\tawsdns-21\x02co\x02uk\x00\x11awsdns-hostmaster\x06amazon\xc0\x19\x00' +NEWLINE b'\x00\x00\x01\x00\x00\x1c \x00\x00\x03\x84\x00\x12u\x00\x00\x01Q\x80\x00\x00)\x02\x00\x00\x00\x00\x00\x00\x00'NEWLINE )NEWLINE assert txt.answers[0].domain_name == "live.github.com"NEWLINE invalid_rr_domain_name = dns.Message.unpack(NEWLINE b'V\x1a\x81\x80\x00\x01\x00\x01\x00\x01\x00\x01\x05alive\x06github\x03com\x00\x00' +NEWLINE b'\x10\x00\x01\xc0\x0c\x00\x05\x00\x01\x00\x00\x0b\xc6\x00\x07\x99live\xc0\x12\xc0\x12\x00\x06\x00\x01' +NEWLINE b'\x00\x00\x03\x84\x00H\x07ns-1707\tawsdns-21\x02co\x02uk\x00\x11awsdns-hostmaster\x06amazon\xc0\x19\x00' +NEWLINE b'\x00\x00\x01\x00\x00\x1c \x00\x00\x03\x84\x00\x12u\x00\x00\x01Q\x80\x00\x00)\x02\x00\x00\x00\x00\x00\x00\x00'NEWLINE )NEWLINE assert invalid_rr_domain_name.answers[0].data == b'\x99live\xc0\x12'NEWLINENEWLINE req = tutils.tdnsreq()NEWLINE for flag in "authoritative_answer", "truncation", "recursion_desired", "recursion_available":NEWLINE setattr(req, flag, True)NEWLINE assert getattr(dns.Message.unpack(req.packed), flag) is TrueNEWLINE setattr(req, flag, False)NEWLINE assert getattr(dns.Message.unpack(req.packed), flag) is FalseNEWLINENEWLINE def test_copy(self):NEWLINE msg = tutils.tdnsresp()NEWLINE assert dns.Message.from_state(msg.get_state()) == msgNEWLINE copy = msg.copy()NEWLINE assert copy is not msgNEWLINE assert copy != msgNEWLINE copy.id = msg.idNEWLINE assert copy == msgNEWLINE assert copy.questions is not msg.questionsNEWLINE assert copy.questions == msg.questionsNEWLINE assert copy.answers is not msg.answersNEWLINE assert copy.answers == msg.answersNEWLINE assert copy.authorities is not msg.authoritiesNEWLINE assert copy.authorities == msg.authoritiesNEWLINE assert copy.additionals is not msg.additionalsNEWLINE assert copy.additionals == msg.additionalsNEWLINENEWLINENEWLINEclass TestDNSFlow:NEWLINENEWLINE def test_copy(self):NEWLINE f = tflow.tdnsflow(resp=True)NEWLINE assert repr(f)NEWLINE f.get_state()NEWLINE f2 = f.copy()NEWLINE a = f.get_state()NEWLINE b = f2.get_state()NEWLINE del a["id"]NEWLINE del b["id"]NEWLINE assert a == bNEWLINE assert not f == f2NEWLINE assert f is not f2NEWLINENEWLINE assert f.request.get_state() == f2.request.get_state()NEWLINE assert f.request is not 
f2.requestNEWLINE assert f.request == f2.requestNEWLINE assert f.response is not f2.responseNEWLINE assert f.response.get_state() == f2.response.get_state()NEWLINE assert f.response == f2.responseNEWLINENEWLINE f = tflow.tdnsflow(err=True)NEWLINE f2 = f.copy()NEWLINE assert f is not f2NEWLINE assert f.request is not f2.requestNEWLINE assert f.request == f2.requestNEWLINE assert f.error.get_state() == f2.error.get_state()NEWLINE assert f.error is not f2.errorNEWLINENEWLINE def test_match(self):NEWLINE f = tflow.tdnsflow(resp=True)NEWLINE assert not flowfilter.match("~b nonexistent", f)NEWLINE assert flowfilter.match(None, f)NEWLINE assert flowfilter.match("~b dns.google", f)NEWLINE assert flowfilter.match("~b 8.8.8.8", f)NEWLINENEWLINE assert flowfilter.match("~bq dns.google", f)NEWLINE assert not flowfilter.match("~bq 8.8.8.8", f)NEWLINENEWLINE assert flowfilter.match("~bs dns.google", f)NEWLINE assert flowfilter.match("~bs 8.8.4.4", f)NEWLINENEWLINE assert flowfilter.match("~dns", f)NEWLINE assert not flowfilter.match("~dns", tflow.ttcpflow())NEWLINE assert not flowfilter.match("~dns", tflow.tflow())NEWLINENEWLINE f = tflow.tdnsflow(err=True)NEWLINE assert flowfilter.match("~e", f)NEWLINENEWLINE with pytest.raises(ValueError):NEWLINE flowfilter.match("~", f)NEWLINENEWLINE def test_repr(self):NEWLINE f = tflow.tdnsflow()NEWLINE assert 'DNSFlow' in repr(f)NEWLINE |
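As a companion to the packing tests above, the fixed 12-byte DNS header in those byte strings can be decoded with a plain struct format (six big-endian 16-bit fields); the bytes below are the header of the request used in assert_eq():

import struct

raw = b'\x00\x2a\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00'
ident, flags, qdcount, ancount, nscount, arcount = struct.unpack('!6H', raw)

assert ident == 42 and qdcount == 1 and ancount == 0
assert flags & 0x0100  # the RD (recursion desired) bit of the flags word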
# Licensed under a 3-clause BSD style license - see LICENSE.rstNEWLINENEWLINE"""NEWLINEThis module provides utility functions for the models packageNEWLINE"""NEWLINENEWLINENEWLINEfrom collections import dequeNEWLINEfrom collections.abc import MutableMappingNEWLINEfrom inspect import signatureNEWLINENEWLINEimport numpy as npNEWLINENEWLINENEWLINEfrom ..utils import isiterable, check_broadcastNEWLINEfrom ..utils.compat import NUMPY_LT_1_14NEWLINENEWLINEfrom .. import units as uNEWLINENEWLINE__all__ = ['ExpressionTree', 'AliasDict', 'check_broadcast',NEWLINE 'poly_map_domain', 'comb', 'ellipse_extent']NEWLINENEWLINENEWLINEclass ExpressionTree:NEWLINE __slots__ = ['left', 'right', 'value', 'inputs', 'outputs']NEWLINENEWLINE def __init__(self, value, left=None, right=None, inputs=None, outputs=None):NEWLINE self.value = valueNEWLINE self.inputs = inputsNEWLINE self.outputs = outputsNEWLINE self.left = leftNEWLINENEWLINE # Two subtrees can't be the same *object* or else traverse_postorderNEWLINE # breaks, so we just always copy the right subtree to subvert that.NEWLINE if right is not None and left is right:NEWLINE right = right.copy()NEWLINENEWLINE self.right = rightNEWLINENEWLINE def __getstate__(self):NEWLINE # For some reason the default pickle protocol on Python 2 does not justNEWLINE # do this. On Python 3 it's not a problem.NEWLINE return dict((slot, getattr(self, slot)) for slot in self.__slots__)NEWLINENEWLINE def __setstate__(self, state):NEWLINE for slot, value in state.items():NEWLINE setattr(self, slot, value)NEWLINENEWLINE @staticmethodNEWLINE def _recursive_lookup(branch, adict, key):NEWLINE if isinstance(branch, ExpressionTree):NEWLINE return adict[key]NEWLINE else:NEWLINE return branch, keyNEWLINENEWLINE @propertyNEWLINE def inputs_map(self):NEWLINE """NEWLINE Map the names of the inputs to this ExpressionTree to the inputs to the leaf models.NEWLINE """NEWLINE inputs_map = {}NEWLINE if not isinstance(self.value, str): # If we don't have an operator the mapping is trivialNEWLINE return {inp: (self.value, inp) for inp in self.inputs}NEWLINENEWLINE elif self.value == '|':NEWLINE for inp in self.inputs:NEWLINE m, inp2 = self._recursive_lookup(self.left, self.left.inputs_map, inp)NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE elif self.value == '&':NEWLINE for i, inp in enumerate(self.inputs):NEWLINE if i < len(self.left.inputs): # Get from leftNEWLINE m, inp2 = self._recursive_lookup(self.left,NEWLINE self.left.inputs_map,NEWLINE self.left.inputs[i])NEWLINE inputs_map[inp] = m, inp2NEWLINE else: # Get from rightNEWLINE m, inp2 = self._recursive_lookup(self.right,NEWLINE self.right.inputs_map,NEWLINE self.right.inputs[i - len(self.left.inputs)])NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE else:NEWLINE for inp in self.left.inputs:NEWLINE m, inp2 = self._recursive_lookup(self.left, self.left.inputs_map, inp)NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE return inputs_mapNEWLINENEWLINE @propertyNEWLINE def outputs_map(self):NEWLINE """NEWLINE Map the names of the outputs to this ExpressionTree to the outputs to the leaf models.NEWLINE """NEWLINE outputs_map = {}NEWLINE if not isinstance(self.value, str): # If we don't have an operator the mapping is trivialNEWLINE return {out: (self.value, out) for out in self.outputs}NEWLINENEWLINE elif self.value == '|':NEWLINE for out in self.outputs:NEWLINE m, out2 = self._recursive_lookup(self.right, self.right.outputs_map, out)NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE elif self.value == '&':NEWLINE for i, out in 
enumerate(self.outputs):NEWLINE if i < len(self.left.outputs): # Get from leftNEWLINE m, out2 = self._recursive_lookup(self.left,NEWLINE self.left.outputs_map,NEWLINE self.left.outputs[i])NEWLINE outputs_map[out] = m, out2NEWLINE else: # Get from rightNEWLINE m, out2 = self._recursive_lookup(self.right,NEWLINE self.right.outputs_map,NEWLINE self.right.outputs[i - len(self.left.outputs)])NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE else:NEWLINE for out in self.left.outputs:NEWLINE m, out2 = self._recursive_lookup(self.left, self.left.outputs_map, out)NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE return outputs_mapNEWLINENEWLINE @propertyNEWLINE def isleaf(self):NEWLINE return self.left is None and self.right is NoneNEWLINENEWLINE def traverse_preorder(self):NEWLINE stack = deque([self])NEWLINE while stack:NEWLINE node = stack.pop()NEWLINE yield nodeNEWLINENEWLINE if node.right is not None:NEWLINE stack.append(node.right)NEWLINE if node.left is not None:NEWLINE stack.append(node.left)NEWLINENEWLINE def traverse_inorder(self):NEWLINE stack = deque()NEWLINE node = selfNEWLINE while stack or node is not None:NEWLINE if node is not None:NEWLINE stack.append(node)NEWLINE node = node.leftNEWLINE else:NEWLINE node = stack.pop()NEWLINE yield nodeNEWLINE node = node.rightNEWLINENEWLINE def traverse_postorder(self):NEWLINE stack = deque([self])NEWLINE last = NoneNEWLINE while stack:NEWLINE node = stack[-1]NEWLINE if last is None or node is last.left or node is last.right:NEWLINE if node.left is not None:NEWLINE stack.append(node.left)NEWLINE elif node.right is not None:NEWLINE stack.append(node.right)NEWLINE elif node.left is last and node.right is not None:NEWLINE stack.append(node.right)NEWLINE else:NEWLINE yield stack.pop()NEWLINE last = nodeNEWLINENEWLINE def evaluate(self, operators, getter=None, start=0, stop=None):NEWLINE """Evaluate the expression represented by this tree.NEWLINENEWLINE ``Operators`` should be a dictionary mapping operator names ('tensor',NEWLINE 'product', etc.) to a function that implements that operator for theNEWLINE correct number of operands.NEWLINENEWLINE If given, ``getter`` is a function evaluated on each *leaf* node'sNEWLINE value before applying the operator between them. This could be used,NEWLINE for example, to operate on an attribute of the node values rather thanNEWLINE directly on the node values. 
The ``getter`` is passed both the indexNEWLINE of the leaf (a count starting at 0 that is incremented after each leafNEWLINE is found) and the leaf node itself.NEWLINENEWLINE The ``start`` and ``stop`` arguments allow evaluating a sub-expressionNEWLINE within the expression tree.NEWLINENEWLINE TODO: Document this better.NEWLINE """NEWLINENEWLINE stack = deque()NEWLINENEWLINE if getter is None:NEWLINE getter = lambda idx, value: valueNEWLINENEWLINE if start is None:NEWLINE start = 0NEWLINENEWLINE leaf_idx = 0NEWLINE for node in self.traverse_postorder():NEWLINE if node.isleaf:NEWLINE # For a "tree" containing just a single operator at the rootNEWLINE # Also push the index of this leaf onto the stack, which willNEWLINE # prove useful for evaluating subexpressionsNEWLINE stack.append((getter(leaf_idx, node.value), leaf_idx))NEWLINE leaf_idx += 1NEWLINE else:NEWLINE operator = operators[node.value]NEWLINENEWLINE if len(stack) < 2:NEWLINE # Skip this operator if there are not enough operands onNEWLINE # the stack; this can happen if some operands were skippedNEWLINE # when evaluating a sub-expressionNEWLINE continueNEWLINENEWLINE right = stack.pop()NEWLINE left = stack.pop()NEWLINE operands = []NEWLINENEWLINE for operand in (left, right):NEWLINE # idx is the leaf index; -1 if not a leaf nodeNEWLINE if operand[-1] == -1:NEWLINE operands.append(operand)NEWLINE else:NEWLINE operand, idx = operandNEWLINE if start <= idx and (stop is None or idx < stop):NEWLINE operands.append((operand, idx))NEWLINENEWLINE if len(operands) == 2:NEWLINE # evaluate the operator with the given operands and placeNEWLINE # the result on the stack (with -1 for the "leaf index"NEWLINE # since this result is not a leaf nodeNEWLINE left, right = operandsNEWLINE stack.append((operator(left[0], right[0]), -1))NEWLINE elif len(operands) == 0:NEWLINE # Just push the left one back on the stackNEWLINE # TODO: Explain and/or refactor this betterNEWLINE # This is here because even if both operands were "skipped"NEWLINE # due to being outside the (start, stop) range, we've onlyNEWLINE # skipped one operator. But there should be at least 2NEWLINE # operators involving these operands, so we push the oneNEWLINE # from the left back onto the stack so that the nextNEWLINE # operator will be skipped as well. 
Should probably comeNEWLINE # up with an easier to follow way to write this algorithmNEWLINE stack.append(left)NEWLINE else:NEWLINE # one or more of the operands was not included in theNEWLINE # sub-expression slice, so don't evaluate the operator;NEWLINE # instead place left over operands (if any) back on theNEWLINE # stack for later useNEWLINE stack.extend(operands)NEWLINENEWLINE return stack.pop()[0]NEWLINENEWLINE def copy(self):NEWLINE # Hopefully this won't blow the stack for any practical case; if such aNEWLINE # case arises that this won't work then I suppose we can find anNEWLINE # iterative approach.NEWLINENEWLINE children = []NEWLINE for child in (self.left, self.right):NEWLINE if isinstance(child, ExpressionTree):NEWLINE children.append(child.copy())NEWLINE else:NEWLINE children.append(child)NEWLINENEWLINE return self.__class__(self.value, left=children[0], right=children[1])NEWLINENEWLINE def format_expression(self, operator_precedence, format_leaf=None):NEWLINE leaf_idx = 0NEWLINE operands = deque()NEWLINENEWLINE if format_leaf is None:NEWLINE format_leaf = lambda i, l: '[{0}]'.format(i)NEWLINENEWLINE for node in self.traverse_postorder():NEWLINE if node.isleaf:NEWLINE operands.append(format_leaf(leaf_idx, node))NEWLINE leaf_idx += 1NEWLINE continueNEWLINENEWLINE oper_order = operator_precedence[node.value]NEWLINE right = operands.pop()NEWLINE left = operands.pop()NEWLINENEWLINE if (node.left is not None and not node.left.isleaf andNEWLINE operator_precedence[node.left.value] < oper_order):NEWLINE left = '({0})'.format(left)NEWLINE if (node.right is not None and not node.right.isleaf andNEWLINE operator_precedence[node.right.value] < oper_order):NEWLINE right = '({0})'.format(right)NEWLINENEWLINE operands.append(' '.join((left, node.value, right)))NEWLINENEWLINE return ''.join(operands)NEWLINENEWLINENEWLINEclass AliasDict(MutableMapping):NEWLINE """NEWLINE Creates a `dict` like object that wraps an existing `dict` or otherNEWLINE `MutableMapping`, along with a `dict` of *key aliases* that translateNEWLINE between specific keys in this dict to different keys in the underlyingNEWLINE dict.NEWLINENEWLINE In other words, keys that do not have an associated alias are accessed andNEWLINE stored like a normal `dict`. 
However, a key that has an alias is accessedNEWLINE and stored to the "parent" dict via the alias.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE parent : dict-likeNEWLINE The parent `dict` that aliased keys and accessed from and stored to.NEWLINENEWLINE aliases : dict-likeNEWLINE Maps keys in this dict to their associated keys in the parent dict.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINENEWLINE >>> parent = {'a': 1, 'b': 2, 'c': 3}NEWLINE >>> aliases = {'foo': 'a', 'bar': 'c'}NEWLINE >>> alias_dict = AliasDict(parent, aliases)NEWLINE >>> alias_dict['foo']NEWLINE 1NEWLINE >>> alias_dict['bar']NEWLINE 3NEWLINENEWLINE Keys in the original parent dict are not visible if they were notNEWLINE aliased::NEWLINENEWLINE >>> alias_dict['b']NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE KeyError: 'b'NEWLINENEWLINE Likewise, updates to aliased keys are reflected back in the parent dict::NEWLINENEWLINE >>> alias_dict['foo'] = 42NEWLINE >>> alias_dict['foo']NEWLINE 42NEWLINE >>> parent['a']NEWLINE 42NEWLINENEWLINE However, updates/insertions to keys that are *not* aliased are notNEWLINE reflected in the parent dict::NEWLINENEWLINE >>> alias_dict['qux'] = 99NEWLINE >>> alias_dict['qux']NEWLINE 99NEWLINE >>> 'qux' in parentNEWLINE FalseNEWLINENEWLINE In particular, updates on the `AliasDict` to a key that is equal toNEWLINE one of the aliased keys in the parent dict does *not* update the parentNEWLINE dict. For example, ``alias_dict`` aliases ``'foo'`` to ``'a'``. ButNEWLINE assigning to a key ``'a'`` on the `AliasDict` does not impact theNEWLINE parent::NEWLINENEWLINE >>> alias_dict['a'] = 'nope'NEWLINE >>> alias_dict['a']NEWLINE 'nope'NEWLINE >>> parent['a']NEWLINE 42NEWLINE """NEWLINENEWLINE _store_type = dictNEWLINE """NEWLINE Subclasses may override this to use other mapping types as the underlyingNEWLINE storage, for example an `OrderedDict`. 
However, even in this caseNEWLINE additional work may be needed to get things like the ordering right.NEWLINE """NEWLINENEWLINE def __init__(self, parent, aliases):NEWLINE self._parent = parentNEWLINE self._store = self._store_type()NEWLINE self._aliases = dict(aliases)NEWLINENEWLINE def __getitem__(self, key):NEWLINE if key in self._aliases:NEWLINE try:NEWLINE return self._parent[self._aliases[key]]NEWLINE except KeyError:NEWLINE raise KeyError(key)NEWLINENEWLINE return self._store[key]NEWLINENEWLINE def __setitem__(self, key, value):NEWLINE if key in self._aliases:NEWLINE self._parent[self._aliases[key]] = valueNEWLINE else:NEWLINE self._store[key] = valueNEWLINENEWLINE def __delitem__(self, key):NEWLINE if key in self._aliases:NEWLINE try:NEWLINE del self._parent[self._aliases[key]]NEWLINE except KeyError:NEWLINE raise KeyError(key)NEWLINE else:NEWLINE del self._store[key]NEWLINENEWLINE def __iter__(self):NEWLINE """NEWLINE First iterates over keys from the parent dict (if the aliased keys areNEWLINE present in the parent), followed by any keys in the local store.NEWLINE """NEWLINENEWLINE for key, alias in self._aliases.items():NEWLINE if alias in self._parent:NEWLINE yield keyNEWLINENEWLINE for key in self._store:NEWLINE yield keyNEWLINENEWLINE def __len__(self):NEWLINE # TODO:NEWLINE # This could be done more efficiently, but at present the use case forNEWLINE # it is narrow if non-existent.NEWLINE return len(list(iter(self)))NEWLINENEWLINE def __repr__(self):NEWLINE # repr() just like any other dict--this should look transparentNEWLINE store_copy = self._store_type()NEWLINE for key, alias in self._aliases.items():NEWLINE if alias in self._parent:NEWLINE store_copy[key] = self._parent[alias]NEWLINENEWLINE store_copy.update(self._store)NEWLINENEWLINE return repr(store_copy)NEWLINENEWLINENEWLINEclass _BoundingBox(tuple):NEWLINE """NEWLINE Base class for models with custom bounding box templates (methods thatNEWLINE return an actual bounding box tuple given some adjustable parameters--seeNEWLINE for example `~astropy.modeling.models.Gaussian1D.bounding_box`).NEWLINENEWLINE On these classes the ``bounding_box`` property still returns a `tuple`NEWLINE giving the default bounding box for that instance of the model. 
But thatNEWLINE tuple may also be a subclass of this class that is callable, and allowsNEWLINE a new tuple to be returned using a user-supplied value for any adjustableNEWLINE parameters to the bounding box.NEWLINE """NEWLINENEWLINE _model = NoneNEWLINENEWLINE def __new__(cls, input_, _model=None):NEWLINE self = super().__new__(cls, input_)NEWLINE if _model is not None:NEWLINE # Bind this _BoundingBox (most likely a subclass) to a ModelNEWLINE # instance so that its __call__ can access the modelNEWLINE self._model = _modelNEWLINENEWLINE return selfNEWLINENEWLINE def __call__(self, *args, **kwargs):NEWLINE raise NotImplementedError(NEWLINE "This bounding box is fixed by the model and does not have "NEWLINE "adjustable parameters.")NEWLINENEWLINE @classmethodNEWLINE def validate(cls, model, bounding_box):NEWLINE """NEWLINE Validate a given bounding box sequence against the given model (whichNEWLINE may be either a subclass of `~astropy.modeling.Model` or an instanceNEWLINE thereof, so long as the ``.inputs`` attribute is defined.NEWLINENEWLINE Currently this just checks that the bounding_box is either a 2-tupleNEWLINE of lower and upper bounds for 1-D models, or an N-tuple of 2-tuplesNEWLINE for N-D models.NEWLINENEWLINE This also returns a normalized version of the bounding_box input toNEWLINE ensure it is always an N-tuple (even for the 1-D case).NEWLINE """NEWLINENEWLINE nd = model.n_inputsNEWLINENEWLINE if nd == 1:NEWLINE if (not isiterable(bounding_box)NEWLINE or np.shape(bounding_box) not in ((2,), (1, 2))):NEWLINE raise ValueError(NEWLINE "Bounding box for {0} model must be a sequence of length "NEWLINE "2 consisting of a lower and upper bound, or a 1-tuple "NEWLINE "containing such a sequence as its sole element.".format(NEWLINE model.name))NEWLINENEWLINE if len(bounding_box) == 1:NEWLINE return cls((tuple(bounding_box[0]),))NEWLINE else:NEWLINE return cls(tuple(bounding_box))NEWLINE else:NEWLINE if (not isiterable(bounding_box)NEWLINE or np.shape(bounding_box) != (nd, 2)):NEWLINE raise ValueError(NEWLINE "Bounding box for {0} model must be a sequence of length "NEWLINE "{1} (the number of model inputs) consisting of pairs of "NEWLINE "lower and upper bounds for those inputs on which to "NEWLINE "evaluate the model.".format(model.name, nd))NEWLINENEWLINE return cls(tuple(bounds) for bounds in bounding_box)NEWLINENEWLINENEWLINEdef make_binary_operator_eval(oper, f, g):NEWLINE """NEWLINE Given a binary operator (as a callable of two arguments) ``oper`` andNEWLINE two callables ``f`` and ``g`` which accept the same arguments,NEWLINE returns a *new* function that takes the same arguments as ``f`` and ``g``,NEWLINE but passes the outputs of ``f`` and ``g`` in the given ``oper``.NEWLINENEWLINE ``f`` and ``g`` are assumed to return tuples (which may be 1-tuples). TheNEWLINE given operator is applied element-wise to tuple outputs).NEWLINENEWLINE ExampleNEWLINE -------NEWLINENEWLINE >>> from operator import addNEWLINE >>> def prod(x, y):NEWLINE ... 
return (x * y,)NEWLINE ...NEWLINE >>> sum_of_prod = make_binary_operator_eval(add, prod, prod)NEWLINE >>> sum_of_prod(3, 5)NEWLINE (30,)NEWLINE """NEWLINENEWLINE return lambda inputs, params: \NEWLINE tuple(oper(x, y) for x, y in zip(f(inputs, params),NEWLINE g(inputs, params)))NEWLINENEWLINENEWLINEdef poly_map_domain(oldx, domain, window):NEWLINE """NEWLINE Map domain into window by shifting and scaling.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE oldx : arrayNEWLINE original coordinatesNEWLINE domain : list or tuple of length 2NEWLINE function domainNEWLINE window : list or tuple of length 2NEWLINE range into which to map the domainNEWLINE """NEWLINE domain = np.array(domain, dtype=np.float64)NEWLINE window = np.array(window, dtype=np.float64)NEWLINE scl = (window[1] - window[0]) / (domain[1] - domain[0])NEWLINE off = (window[0] * domain[1] - window[1] * domain[0]) / (domain[1] - domain[0])NEWLINE return off + scl * oldxNEWLINENEWLINENEWLINEdef comb(N, k):NEWLINE """NEWLINE The number of combinations of N things taken k at a time.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE N : int, arrayNEWLINE Number of things.NEWLINE k : int, arrayNEWLINE Number of elements taken.NEWLINENEWLINE """NEWLINE if (k > N) or (N < 0) or (k < 0):NEWLINE return 0NEWLINE val = 1NEWLINE for j in range(min(k, N - k)):NEWLINE val = (val * (N - j)) / (j + 1)NEWLINE return valNEWLINENEWLINENEWLINEdef array_repr_oneline(array):NEWLINE """NEWLINE Represents a multi-dimensional Numpy array flattened onto a single line.NEWLINE """NEWLINE sep = ',' if NUMPY_LT_1_14 else ', 'NEWLINE r = np.array2string(array, separator=sep, suppress_small=True)NEWLINE return ' '.join(l.strip() for l in r.splitlines())NEWLINENEWLINENEWLINEdef combine_labels(left, right):NEWLINE """NEWLINE For use with the join operator &: Combine left input/output labels withNEWLINE right input/output labels.NEWLINENEWLINE If none of the labels conflict then this just returns a sum of tuples.NEWLINE However if *any* of the labels conflict, this appends '0' to the left-handNEWLINE labels and '1' to the right-hand labels so there is no ambiguity).NEWLINE """NEWLINENEWLINE if set(left).intersection(right):NEWLINE left = tuple(l + '0' for l in left)NEWLINE right = tuple(r + '1' for r in right)NEWLINENEWLINE return left + rightNEWLINENEWLINENEWLINEdef ellipse_extent(a, b, theta):NEWLINE """NEWLINE Calculates the extent of a box encapsulating a rotated 2D ellipse.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE a : float or `~astropy.units.Quantity`NEWLINE Major axis.NEWLINE b : float or `~astropy.units.Quantity`NEWLINE Minor axis.NEWLINE theta : float or `~astropy.units.Quantity`NEWLINE Rotation angle. If given as a floating-point value, it is assumed to beNEWLINE in radians.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE offsets : tupleNEWLINE The absolute value of the offset distances from the ellipse center thatNEWLINE define its bounding box region, ``(dx, dy)``.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE .. 
plot::NEWLINE :include-source:NEWLINENEWLINE import numpy as npNEWLINE import matplotlib.pyplot as pltNEWLINE from astropy.modeling.models import Ellipse2DNEWLINE from astropy.modeling.utils import ellipse_extent, render_modelNEWLINENEWLINE amplitude = 1NEWLINE x0 = 50NEWLINE y0 = 50NEWLINE a = 30NEWLINE b = 10NEWLINE theta = np.pi/4NEWLINENEWLINE model = Ellipse2D(amplitude, x0, y0, a, b, theta)NEWLINENEWLINE dx, dy = ellipse_extent(a, b, theta)NEWLINENEWLINE limits = [x0 - dx, x0 + dx, y0 - dy, y0 + dy]NEWLINENEWLINE model.bounding_box = limitsNEWLINENEWLINE image = render_model(model)NEWLINENEWLINE plt.imshow(image, cmap='binary', interpolation='nearest', alpha=.5,NEWLINE extent = limits)NEWLINE plt.show()NEWLINE """NEWLINENEWLINE t = np.arctan2(-b * np.tan(theta), a)NEWLINE dx = a * np.cos(t) * np.cos(theta) - b * np.sin(t) * np.sin(theta)NEWLINENEWLINE t = np.arctan2(b, a * np.tan(theta))NEWLINE dy = b * np.sin(t) * np.cos(theta) + a * np.cos(t) * np.sin(theta)NEWLINENEWLINE if isinstance(dx, u.Quantity) or isinstance(dy, u.Quantity):NEWLINE return np.abs(u.Quantity([dx, dy]))NEWLINE else:NEWLINE return np.abs([dx, dy])NEWLINENEWLINENEWLINEdef get_inputs_and_params(func):NEWLINE """NEWLINE Given a callable, determine the input variables and theNEWLINE parameters.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE func : callableNEWLINENEWLINE ReturnsNEWLINE -------NEWLINE inputs, params : tupleNEWLINE Each entry is a list of inspect.Parameter objectsNEWLINE """NEWLINE sig = signature(func)NEWLINENEWLINE inputs = []NEWLINE params = []NEWLINE for param in sig.parameters.values():NEWLINE if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):NEWLINE raise ValueError("Signature must not have *args or **kwargs")NEWLINE if param.default == param.empty:NEWLINE inputs.append(param)NEWLINE else:NEWLINE params.append(param)NEWLINENEWLINE return inputs, paramsNEWLINENEWLINENEWLINEdef _parameter_with_unit(parameter, unit):NEWLINE if parameter.unit is None:NEWLINE return parameter.value * unitNEWLINE else:NEWLINE return parameter.quantity.to(unit)NEWLINENEWLINENEWLINEdef _parameter_without_unit(value, old_unit, new_unit):NEWLINE if old_unit is None:NEWLINE return valueNEWLINE else:NEWLINE return value * old_unit.to(new_unit)NEWLINENEWLINENEWLINEdef _combine_equivalency_dict(keys, eq1=None, eq2=None):NEWLINE # Given two dictionaries that give equivalencies for a set of keys, forNEWLINE # example input value names, return a dictionary that includes all theNEWLINE # equivalenciesNEWLINE eq = {}NEWLINE for key in keys:NEWLINE eq[key] = []NEWLINE if eq1 is not None and key in eq1:NEWLINE eq[key].extend(eq1[key])NEWLINE if eq2 is not None and key in eq2:NEWLINE eq[key].extend(eq2[key])NEWLINE return eqNEWLINENEWLINENEWLINEdef _to_radian(value):NEWLINE """ Convert ``value`` to radian. """NEWLINE if isinstance(value, u.Quantity):NEWLINE return value.to(u.rad)NEWLINE else:NEWLINE return np.deg2rad(value)NEWLINENEWLINENEWLINEdef _to_orig_unit(value, raw_unit=None, orig_unit=None):NEWLINE """ Convert value with ``raw_unit`` to ``orig_unit``. """NEWLINE if raw_unit is not None:NEWLINE return (value * raw_unit).to(orig_unit)NEWLINE else:NEWLINE return np.rad2deg(value)NEWLINE |
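A quick numerical sanity check of ellipse_extent: with theta = 0 the bounding half-widths reduce to the bare semi-axes. This standalone, unitless re-statement mirrors the formulas above (Quantity handling dropped for brevity):

import numpy as np

def ellipse_extent_sketch(a, b, theta):
    """Unitless restatement of ellipse_extent, for illustration only."""
    t = np.arctan2(-b * np.tan(theta), a)
    dx = a * np.cos(t) * np.cos(theta) - b * np.sin(t) * np.sin(theta)
    t = np.arctan2(b, a * np.tan(theta))
    dy = b * np.sin(t) * np.cos(theta) + a * np.cos(t) * np.sin(theta)
    return np.abs([dx, dy])

# theta = 0 degenerates to the unrotated half-axes:
assert np.allclose(ellipse_extent_sketch(30, 10, 0.0), [30, 10])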
"""boon-backend URL ConfigurationNEWLINENEWLINEThe `urlpatterns` list routes URLs to views. For more information please see:NEWLINE https://docs.djangoproject.com/en/3.2/topics/http/urls/NEWLINEExamples:NEWLINEFunction viewsNEWLINE 1. Add an import: from my_app import viewsNEWLINE 2. Add a URL to urlpatterns: path('', views.home, name='home')NEWLINEClass-based viewsNEWLINE 1. Add an import: from other_app.views import HomeNEWLINE 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')NEWLINEIncluding another URLconfNEWLINE 1. Import the include() function: from django.urls import include, pathNEWLINE 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))NEWLINE"""NEWLINEfrom django.contrib import adminNEWLINEfrom django.urls import pathNEWLINENEWLINEurlpatterns = [NEWLINE path('admin/', admin.site.urls),NEWLINE]NEWLINE |
# Try getting setup from setuptools first, then distutils.core.
# http://goo.gl/BC32zk (StackOverflow)
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

classifiers = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Science/Research',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 3',
    'Topic :: Scientific/Engineering :: Visualization'
]

setup(
    name="quickplot",
    packages=['quickplot'],
    version="0.1.2",
    description="The batteries-included plotting wrapper for matplotlib",
    author="Ken Sheedlo",
    author_email="[email protected]",
    url="https://github.com/ksheedlo/quickplot",
    download_url="https://github.com/ksheedlo/quickplot/archive/master.zip",
    classifiers=classifiers,
    dependency_links=['https://github.com/matplotlib/matplotlib/tarball/v1.3.x#egg=matplotlib-1.3.0'],
    install_requires=[
        "numpy >= 1.5.0",
        "matplotlib >= 1.3.0"
    ]
)
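# Installation note (illustrative): from the project root, `pip install .`
# installs the package and its requirements. The dependency_links entry
# above predates modern packaging and is ignored by recent versions of pip.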
#!/usr/bin/env pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE#NEWLINE# Project: Fast Azimuthal integrationNEWLINE# https://github.com/silx-kit/pyFAINEWLINE#NEWLINE# Copyright (C) 2017-2019 European Synchrotron Radiation Facility, Grenoble, FranceNEWLINE#NEWLINE# Principal author: Jérôme Kieffer ([email protected])NEWLINE#NEWLINE# Permission is hereby granted, free of charge, to any person obtaining a copyNEWLINE# of this software and associated documentation files (the "Software"), to dealNEWLINE# in the Software without restriction, including without limitation the rightsNEWLINE# to use, copy, modify, merge, publish, distribute, sublicense, and/or sellNEWLINE# copies of the Software, and to permit persons to whom the Software isNEWLINE# furnished to do so, subject to the following conditions:NEWLINE# .NEWLINE# The above copyright notice and this permission notice shall be included inNEWLINE# all copies or substantial portions of the Software.NEWLINE# .NEWLINE# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORNEWLINE# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,NEWLINE# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THENEWLINE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERNEWLINE# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,NEWLINE# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS INNEWLINE# THE SOFTWARE.NEWLINENEWLINE"""Everything you need to calibrate a detector mounted on a goniometer or anyNEWLINEtranslation tableNEWLINE"""NEWLINENEWLINEfrom __future__ import absolute_import, print_function, with_statement, divisionNEWLINENEWLINE__author__ = "Jérôme Kieffer"NEWLINE__contact__ = "[email protected]"NEWLINE__license__ = "MIT"NEWLINE__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"NEWLINE__date__ = "22/08/2018"NEWLINE__status__ = "development"NEWLINE__docformat__ = 'restructuredtext'NEWLINENEWLINENEWLINEimport osNEWLINEimport loggingNEWLINEimport jsonNEWLINEimport numpyNEWLINEfrom collections import OrderedDict, namedtupleNEWLINEfrom scipy.optimize import minimizeNEWLINEfrom silx.image import marchingsquaresNEWLINEfrom .massif import MassifNEWLINEfrom .control_points import ControlPointsNEWLINEfrom .detectors import detector_factory, DetectorNEWLINEfrom .geometry import GeometryNEWLINEfrom .geometryRefinement import GeometryRefinementNEWLINEfrom .azimuthalIntegrator import AzimuthalIntegratorNEWLINEfrom .utils import StringTypesNEWLINEfrom .multi_geometry import MultiGeometryNEWLINEfrom .units import CONST_hc, CONST_qNEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINEtry:NEWLINE import numexprNEWLINEexcept ImportError:NEWLINE logger.debug("Backtrace", exc_info=True)NEWLINE numexpr = NoneNEWLINENEWLINE# Parameter set used in PyFAI:NEWLINEPoniParam = namedtuple("PoniParam", ["dist", "poni1", "poni2", "rot1", "rot2", "rot3"])NEWLINENEWLINENEWLINEclass BaseTransformation(object):NEWLINE """This class, once instanciated, behaves like a function (via the __call__NEWLINE method). It is responsible for taking any input geometry and translate itNEWLINE into a set of parameters compatible with pyFAI, i.e. 
a tuple with:
    (dist, poni1, poni2, rot1, rot2, rot3)

    This class relies on a user-provided function which does the work.
    """
    def __init__(self, funct, param_names, pos_names=None):
        """Constructor of the class

        :param funct: function which takes as parameters the param_names and the pos_names
        :param param_names: list of names of the parameters used in the model
        :param pos_names: list of motor names for gonio with >1 degree of freedom
        """
        self.callable = funct
        self.variables = {}
        self.param_names = tuple(param_names)
        if pos_names is not None:
            self.pos_names = tuple(pos_names)
        else:
            self.pos_names = ("pos",)
        for key in self.param_names + self.pos_names:
            if key in self.variables:
                raise RuntimeError("The keyword %s is already defined, please choose another variable name" % key)
            self.variables[key] = numpy.NaN
        self.codes = []

    def __call__(self, param, pos):
        """This makes the class instance behave like a function,
        actually a function that translates the n parameters of the detector
        positioning on the goniometer and the m parameters.

        :param param: parameters of the fit
        :param pos: position of the goniometer (representation from the
            goniometer)
        :return: 6-tuple with (dist, poni1, poni2, rot1, rot2, rot3) as needed
            for pyFAI.
        """
        variables = self.variables.copy()
        for name, value in zip(self.param_names, param):
            variables[name] = value
        if len(self.pos_names) == 1:
            variables[self.pos_names[0]] = pos
        else:
            for name, value in zip(self.pos_names, pos):
                variables[name] = value

        res = self.callable(**variables)
        return PoniParam(*res)

    def __repr__(self):
        return "BaseTransformation with param: %s and pos: %s" % (self.param_names, self.pos_names)

    def to_dict(self):
        """Export the instance representation for serialization as a dictionary
        """
        raise RuntimeError("BaseTransformation is not serializable")
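
# Illustrative sketch (not part of the upstream module): how a
# BaseTransformation might be built for a goniometer with a single axis,
# where only poni1 depends on the motor position. The parameter names and
# the linear model below are assumptions made for this example only.
def _example_base_transformation():
    def gonio_model(dist, poni1_offset, scale, rot1, pos):
        # dist and rot1 are refined constants; poni1 moves linearly with the
        # motor position; poni2, rot2 and rot3 stay fixed at zero.
        return (dist, poni1_offset + scale * pos, 0.0, rot1, 0.0, 0.0)

    trans = BaseTransformation(gonio_model,
                               param_names=["dist", "poni1_offset", "scale", "rot1"],
                               pos_names=["pos"])
    # Evaluate the detector geometry for a parameter set and motor position 10:
    return trans([1.0, 0.1, 0.001, 0.0], 10.0)
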
class GeometryTransformation(object):
    """This class, once instantiated, behaves like a function (via the __call__
    method). It is responsible for taking any input geometry and translating it
    into a set of parameters compatible with pyFAI, i.e. a tuple with:
    (dist, poni1, poni2, rot1, rot2, rot3)

    This class uses numexpr for formula evaluation.
    """
    def __init__(self, dist_expr, poni1_expr, poni2_expr,
                 rot1_expr, rot2_expr, rot3_expr,
                 param_names, pos_names=None, constants=None,
                 content=None):
        """Constructor of the class

        :param dist_expr: formula (as string) providing the dist
        :param poni1_expr: formula (as string) providing the poni1
        :param poni2_expr: formula (as string) providing the poni2
        :param rot1_expr: formula (as string) providing the rot1
        :param rot2_expr: formula (as string) providing the rot2
        :param rot3_expr: formula (as string) providing the rot3
        :param param_names: list of names of the parameters used in the model
        :param pos_names: list of motor names for gonio with >1 degree of freedom
        :param constants: a dictionary with some constants the user may want to use
        :param content: Should be None or the name of the class (may be used
            in the future to dispatch to multiple derivative classes)
        """
        if content is not None:
            # Ensures we use the constructor of the right class
            assert content in (self.__class__.__name__, "GeometryTransformation")
        if numexpr is None:
            raise RuntimeError("Geometry translation requires the *numexpr* package")
        self.dist_expr = dist_expr
        self.poni1_expr = poni1_expr
        self.poni2_expr = poni2_expr
        self.rot1_expr = rot1_expr
        self.rot2_expr = rot2_expr
        self.rot3_expr = rot3_expr

        self.variables = {"pi": numpy.pi}
        if constants is not None:
            self.variables.update(constants)

        self.param_names = tuple(param_names)
        if pos_names is not None:
            self.pos_names = tuple(pos_names)
        else:
            self.pos_names = ("pos",)
        for key in self.param_names + self.pos_names:
            if key in self.variables:
                raise RuntimeError("The keyword %s is already defined, please choose another variable name" % key)
            self.variables[key] = numpy.NaN

        self.codes = [numexpr.NumExpr(expr) for expr in (self.dist_expr, self.poni1_expr, self.poni2_expr,
                                                         self.rot1_expr, self.rot2_expr, self.rot3_expr)]

    def __call__(self, param, pos):
        """This makes the class instance behave like a function,
        actually a function that translates the n parameters of the detector
        positioning on the goniometer and the m parameters.

        :param param: parameters of the fit
        :param pos: position of the goniometer (representation from the
            goniometer)
        :return: 6-tuple with (dist, poni1, poni2, rot1, rot2, rot3) as needed
            for pyFAI.
        """
        res = []
        variables = self.variables.copy()
        for name, value in zip(self.param_names, param):
            variables[name] = value
        if len(self.pos_names) == 1:
            variables[self.pos_names[0]] = pos
        else:
            for name, value in zip(self.pos_names, pos):
                variables[name] = value
        for code in self.codes:
            signature = [variables.get(name, numpy.NaN) for name in code.input_names]
            res.append(float(code(*signature)))
        # could be done in a one-liner, but this is easier to understand
        return PoniParam(*res)

    def __repr__(self):
        res = ["GeometryTransformation with param: %s and pos: %s" % (self.param_names, self.pos_names),
               " dist= %s" % self.dist_expr,
" poni1= %s" % self.poni1_expr,NEWLINE " poni2= %s" % self.poni2_expr,NEWLINE " rot1= %s" % self.rot1_expr,NEWLINE " rot2= %s" % self.rot2_expr,NEWLINE " rot3= %s" % self.rot3_expr]NEWLINE return os.linesep.join(res)NEWLINENEWLINE def to_dict(self):NEWLINE """Export the instance representation for serialization as a dictionaryNEWLINE """NEWLINE res = OrderedDict([("content", self.__class__.__name__),NEWLINE ("param_names", self.param_names),NEWLINE ("pos_names", self.pos_names),NEWLINE ("dist_expr", self.dist_expr),NEWLINE ("poni1_expr", self.poni1_expr),NEWLINE ("poni2_expr", self.poni2_expr),NEWLINE ("rot1_expr", self.rot1_expr),NEWLINE ("rot2_expr", self.rot2_expr),NEWLINE ("rot3_expr", self.rot3_expr),NEWLINE ])NEWLINE constants = OrderedDict()NEWLINE for key, val in self.variables.items():NEWLINE if key in self.param_names:NEWLINE continueNEWLINE if self.pos_names and key in self.pos_names:NEWLINE continueNEWLINE constants[key] = valNEWLINE res["constants"] = constantsNEWLINE return resNEWLINENEWLINENEWLINEclass ExtendedTransformation(object):NEWLINE """This class behaves like GeometryTransformation and extends transformationNEWLINE to the wavelength parameter.NEWLINENEWLINE This function uses numexpr for formula evaluation.NEWLINE """NEWLINE def __init__(self, dist_expr=None, poni1_expr=None, poni2_expr=None,NEWLINE rot1_expr=None, rot2_expr=None, rot3_expr=None, wavelength_expr=None,NEWLINE param_names=None, pos_names=None, constants=None,NEWLINE content=None):NEWLINE """Constructor of the classNEWLINENEWLINE :param dist_expr: formula (as string) providing with the distNEWLINE :param poni1_expr: formula (as string) providing with the poni1NEWLINE :param poni2_expr: formula (as string) providing with the poni2NEWLINE :param rot1_expr: formula (as string) providing with the rot1NEWLINE :param rot2_expr: formula (as string) providing with the rot2NEWLINE :param rot3_expr: formula (as string) providing with the rot3NEWLINE :param wavelength_expr: formula (as a string) to calculate wavelength used in angstromNEWLINE :param param_names: list of names of the parameters used in the modelNEWLINE :param pos_names: list of motor names for gonio with >1 degree of freedomNEWLINE :param constants: a dictionary with some constants the user may want to useNEWLINE :param content: Should be None or the name of the class (may be usedNEWLINE in the future to dispatch to multiple derivative classes)NEWLINE """NEWLINE if content is not None:NEWLINE # Ensures we use the constructor of the right classNEWLINE assert content in (self.__class__.__name__, "ExtendedTransformation")NEWLINE if numexpr is None:NEWLINE raise RuntimeError("This Transformation requires the *numexpr* package")NEWLINE self.expressions = OrderedDict()NEWLINENEWLINE if dist_expr is not None:NEWLINE self.expressions["dist"] = dist_exprNEWLINE if poni1_expr is not None:NEWLINE self.expressions["poni1"] = poni1_exprNEWLINE if poni2_expr is not None:NEWLINE self.expressions["poni2"] = poni2_exprNEWLINE if rot1_expr is not None:NEWLINE self.expressions["rot1"] = rot1_exprNEWLINE if rot2_expr is not None:NEWLINE self.expressions["rot2"] = rot2_exprNEWLINE if rot3_expr is not None:NEWLINE self.expressions["rot3"] = rot3_exprNEWLINE if wavelength_expr is not None:NEWLINE self.expressions["wavelength"] = wavelength_exprNEWLINE self.ParamNT = namedtuple("ParamNT", list(self.expressions.keys()))NEWLINE self.variables = {"pi": numpy.pi,NEWLINE "hc": CONST_hc,NEWLINE "q": CONST_q}NEWLINE if constants is not None:NEWLINE 
            self.variables.update(constants)
        self.param_names = tuple(param_names) if param_names is not None else tuple()
        if pos_names is not None:
            self.pos_names = tuple(pos_names)
        else:
            self.pos_names = ("pos",)
        for key in self.param_names + self.pos_names:
            if key in self.variables:
                raise RuntimeError("The keyword %s is already defined, please choose another variable name" % key)
            self.variables[key] = numpy.NaN

        self.codes = OrderedDict(((name, numexpr.NumExpr(expr)) for name, expr in self.expressions.items()))

    def __call__(self, param, pos):
        """This makes the class instance behave like a function,
        actually a function that translates the n parameters of the detector
        positioning on the goniometer and the m parameters.

        :param param: parameters of the fit
        :param pos: position of the goniometer (representation from the
            goniometer)
        :return: named tuple with the evaluated parameters as needed for
            pyFAI (dist, poni1, poni2, rot1, rot2, rot3, and possibly
            wavelength)
        """
        res = {}
        variables = self.variables.copy()
        for name, value in zip(self.param_names, param):
            variables[name] = value
        if len(self.pos_names) == 1:
            variables[self.pos_names[0]] = pos
        else:
            for name, value in zip(self.pos_names, pos):
                variables[name] = value
        for name, code in self.codes.items():
            signature = [variables.get(name, numpy.NaN) for name in code.input_names]
            res[name] = float(code(*signature))
        # could be done in a one-liner, but this is easier to understand
        return self.ParamNT(**res)

    def __repr__(self):
        res = ["%s with param: %s and pos: %s" % (self.__class__.__name__, self.param_names, self.pos_names), ]
        for name, expr in self.expressions.items():
            res.append(" %s= %s" % (name, expr))
        return os.linesep.join(res)

    def to_dict(self):
        """Export the instance representation for serialization as a dictionary
        """
        res = OrderedDict([("content", self.__class__.__name__),
                           ("param_names", self.param_names),
                           ("pos_names", self.pos_names),
                           ])
        for name, expr in self.expressions.items():
            res[name + "_expr"] = expr
        constants = OrderedDict()
        for key, val in self.variables.items():
            if key in self.param_names:
                continue
            if self.pos_names and key in self.pos_names:
                continue
            constants[key] = val
        res["constants"] = constants
        return res


GeometryTranslation = GeometryTransformation
class Goniometer(object):
    """This class represents the goniometer model. Despite what the name
    suggests, it may include translations in addition to rotations.
    """

    _file_version_1_1 = "Goniometer calibration v1.1"

    file_version = "Goniometer calibration v2"

    def __init__(self, param, trans_function, detector="Detector",
                 wavelength=None, param_names=None, pos_names=None):
        """Constructor of the Goniometer class.

        :param param: vector of parameters to refine for defining the detector
            position on the goniometer
        :param trans_function: function taking the parameters of the
            goniometer and the goniometer position and returning the
            6 parameters [dist, poni1, poni2, rot1, rot2, rot3]
        :param detector: detector mounted on the moving arm
        :param wavelength: the wavelength used for the experiment
        :param param_names: list of names to "label" the param vector.
        :param pos_names: list of names to "label" the position vector of
            the gonio.
        """

        self.param = param
        self.trans_function = trans_function
        self.detector = detector_factory(detector)
        self.wavelength = wavelength
        if param_names is None and "param_names" in dir(trans_function):
            param_names = trans_function.param_names
        if param_names is not None:
            if isinstance(param, dict):
                self.param = [param.get(i, 0) for i in param_names]
            self.nt_param = namedtuple("GonioParam", param_names)
        else:
            self.nt_param = lambda *x: tuple(x)
        if pos_names is None and "pos_names" in dir(trans_function):
            pos_names = trans_function.pos_names
        self.nt_pos = namedtuple("GonioPos", pos_names) if pos_names else lambda *x: tuple(x)

    def __repr__(self):
        return "Goniometer with param %s %s with %s" % (self.nt_param(*self.param), os.linesep, self.detector)

    def get_ai(self, position):
        """Creates an azimuthal integrator from the motor position

        :param position: the goniometer position, a float for a 1-axis
            goniometer
        :return: A freshly built AzimuthalIntegrator
        """
        res = self.trans_function(self.param, position)
        params = {"detector": self.detector,
                  "wavelength": self.wavelength}
        for name, value in zip(res._fields, res):
            params[name] = value
        return AzimuthalIntegrator(**params)

    def get_mg(self, positions):
        """Creates a MultiGeometry integrator from a list of goniometer
        positions.

        :param positions: A list of goniometer positions
        :return: A freshly built multi-geometry
        """
        ais = [self.get_ai(pos) for pos in positions]
        mg = MultiGeometry(ais)
        return mg

    def to_dict(self):
        """Export the goniometer configuration to a dictionary

        :return: Ordered dictionary
        """
        dico = OrderedDict([("content", self.file_version)])

        dico["detector"] = self.detector.name
        dico["detector_config"] = self.detector.get_config()

        if self.wavelength:
            dico["wavelength"] = self.wavelength
        dico["param"] = tuple(self.param)
        if "_fields" in dir(self.nt_param):
            dico["param_names"] = self.nt_param._fields
        if "_fields" in dir(self.nt_pos):
            dico["pos_names"] = self.nt_pos._fields
        if "to_dict" in dir(self.trans_function):
            dico["trans_function"] = self.trans_function.to_dict()
        else:
            logger.warning("trans_function is not serializable")
serializable")NEWLINE return dicoNEWLINENEWLINE def save(self, filename):NEWLINE """Save the goniometer configuration to fileNEWLINENEWLINE :param filename: name of the file to save configuration toNEWLINE """NEWLINE dico = self.to_dict()NEWLINE try:NEWLINE with open(filename, "w") as f:NEWLINE f.write(json.dumps(dico, indent=2))NEWLINE except IOError:NEWLINE logger.error("IOError while writing to file %s", filename)NEWLINE write = saveNEWLINENEWLINE @classmethodNEWLINE def _get_detector_from_dict(cls, dico):NEWLINE file_version = dico["content"]NEWLINE if file_version == cls._file_version_1_1:NEWLINE # v1.1NEWLINE # Try to extract useful keysNEWLINE detector = Detector.factory(dico["detector"])NEWLINE # This is not accurate, some keys could be missingNEWLINE keys = detector.get_config().keys()NEWLINE config = {}NEWLINE for k in keys:NEWLINE if k in dico:NEWLINE config[k] = dico[k]NEWLINE del dico[k]NEWLINE detector = Detector.factory(dico["detector"], config)NEWLINE else:NEWLINE # v2NEWLINE detector = Detector.factory(dico["detector"], dico.get("detector_config", None))NEWLINE return detectorNEWLINENEWLINE @classmethodNEWLINE def sload(cls, filename):NEWLINE """Class method for instanciating a Goniometer object from a JSON fileNEWLINENEWLINE :param filename: name of the JSON fileNEWLINE :return: Goniometer objectNEWLINE """NEWLINENEWLINE with open(filename) as f:NEWLINE dico = json.load(f)NEWLINE assert "trans_function" in dico, "No translation function defined in JSON file"NEWLINE file_version = dico["content"]NEWLINE assert file_version in [cls.file_version, cls._file_version_1_1], "JSON file contains a goniometer calibration"NEWLINE detector = cls._get_detector_from_dict(dico)NEWLINE tansfun = dico.get("trans_function", {})NEWLINE if "content" in tansfun:NEWLINE content = tansfun.pop("content")NEWLINE # May be adapted for other classes of GeometryTransformation functionsNEWLINE if content in ("GeometryTranslation", "GeometryTransformation"):NEWLINE funct = GeometryTransformation(**tansfun)NEWLINE elif content == "ExtendedTranformation":NEWLINE funct = ExtendedTransformation(**tansfun)NEWLINE else:NEWLINE raise RuntimeError("content= %s, not in in (GeometryTranslation, GeometryTransformation, ExtendedTranformation)")NEWLINE else: # assume GeometryTransformationNEWLINE funct = GeometryTransformation(**tansfun)NEWLINENEWLINE gonio = cls(param=dico.get("param", []),NEWLINE trans_function=funct,NEWLINE detector=detector,NEWLINE wavelength=dico.get("wavelength"))NEWLINE return gonioNEWLINENEWLINENEWLINEclass SingleGeometry(object):NEWLINE """This class represents a single geometry of a detector position on aNEWLINE goniometer armNEWLINE """NEWLINE def __init__(self, label, image=None, metadata=None, pos_function=None,NEWLINE control_points=None, calibrant=None, detector=None, geometry=None):NEWLINE """Constructor of the SingleGeometry class, used for calibrating aNEWLINE multi-geometry setup with a moving detector.NEWLINENEWLINE :param label: name of the geometry, a string or anything unmutableNEWLINE :param image: image with Debye-Scherrer rings as 2d numpy arrayNEWLINE :param metadata: anything which contains the goniometer positionNEWLINE :param pos_function: a function which takes the metadata as inputNEWLINE and returns the goniometer arm positionNEWLINE :param control_points: a pyFAI.control_points.ControlPoints instanceNEWLINE (optional parameter)NEWLINE :param calibrant: a pyFAI.calibrant.Calibrant instance.NEWLINE Contains the wavelength to be used (optional parameter)NEWLINE :param 
class SingleGeometry(object):
    """This class represents a single geometry of a detector position on a
    goniometer arm
    """
    def __init__(self, label, image=None, metadata=None, pos_function=None,
                 control_points=None, calibrant=None, detector=None, geometry=None):
        """Constructor of the SingleGeometry class, used for calibrating a
        multi-geometry setup with a moving detector.

        :param label: name of the geometry, a string or anything immutable
        :param image: image with Debye-Scherrer rings as 2d numpy array
        :param metadata: anything which contains the goniometer position
        :param pos_function: a function which takes the metadata as input
            and returns the goniometer arm position
        :param control_points: a pyFAI.control_points.ControlPoints instance
            (optional parameter)
        :param calibrant: a pyFAI.calibrant.Calibrant instance.
            Contains the wavelength to be used (optional parameter)
        :param detector: a pyFAI.detectors.Detector instance or something like
            that. Contains the mask to be used (optional parameter)
        :param geometry: an azimuthal integrator or a ponifile
            (or a dict with the geometry) (optional parameter)
        """
        self.label = label
        self.image = image
        self.metadata = metadata  # may be anything
        self.calibrant = calibrant
        if control_points is None or isinstance(control_points, ControlPoints):
            self.control_points = control_points
        else:
            # Probably a NPT file
            self.control_points = ControlPoints(control_points, calibrant=calibrant)

        if detector is not None:
            self.detector = detector_factory(detector)
        else:
            self.detector = None
        if isinstance(geometry, Geometry):
            dict_geo = geometry.getPyFAI()
        elif isinstance(geometry, StringTypes) and os.path.exists(geometry):
            dict_geo = Geometry.sload(geometry).getPyFAI()
        elif isinstance(geometry, dict):
            dict_geo = geometry
        if self.detector is not None:
            dict_geo["detector"] = self.detector
        if self.control_points is not None:
            dict_geo["data"] = self.control_points.getList()
        if self.calibrant is not None:
            dict_geo["calibrant"] = self.calibrant
        if "max_shape" in dict_geo:
            # not used in constructor
            dict_geo.pop("max_shape")
        self.geometry_refinement = GeometryRefinement(**dict_geo)
        if self.detector is None:
            self.detector = self.geometry_refinement.detector
        self.pos_function = pos_function
        self.massif = None

    def get_position(self):
        """This method is in charge of calculating the motor position from metadata/label/..."""
        return self.pos_function(self.metadata)

    def extract_cp(self, max_rings=None, pts_per_deg=1.0, Imin=0):
        """Performs an automatic keypoint extraction and updates the geometry refinement part

        :param max_rings: extract at most N rings from the image
        :param pts_per_deg: number of control points per azimuthal degree (increase for better precision)
        :param Imin: minimum intensity for keeping a peak
        """
        if self.massif is None:
            self.massif = Massif(self.image)

        tth = numpy.array([i for i in self.calibrant.get_2th() if i is not None])
        tth = numpy.unique(tth)
        tth_min = numpy.zeros_like(tth)
        tth_max = numpy.zeros_like(tth)
        delta = (tth[1:] - tth[:-1]) / 4.0
        tth_max[:-1] = delta
        tth_max[-1] = delta[-1]
        tth_min[1:] = -delta
        tth_min[0] = -delta[0]
        tth_max += tth
        tth_min += tth
        shape = self.image.shape
        ttha = self.geometry_refinement.twoThetaArray(shape)
        chia = self.geometry_refinement.chiArray(shape)
        rings = 0
        cp = ControlPoints(calibrant=self.calibrant)
        if max_rings is None:
            max_rings = tth.size

        ms = marchingsquares.MarchingSquaresMergeImpl(ttha,
                                                      mask=self.geometry_refinement.detector.mask,
                                                      use_minmax_cache=True)
        for i in range(tth.size):
            if rings >= max_rings:
                break
            mask = numpy.logical_and(ttha >= tth_min[i], ttha < tth_max[i])
            if self.detector.mask is not None:
                mask = numpy.logical_and(mask, numpy.logical_not(self.geometry_refinement.detector.mask))
            size = mask.sum(dtype=int)
            if size > 0:
                rings += 1
                sub_data = self.image.ravel()[numpy.where(mask.ravel())]
                mean = sub_data.mean(dtype=numpy.float64)
                std = 
sub_data.std(dtype=numpy.float64)NEWLINE upper_limit = mean + stdNEWLINE mask2 = numpy.logical_and(self.image > upper_limit, mask)NEWLINE size2 = mask2.sum(dtype=int)NEWLINE if size2 < 1000:NEWLINE upper_limit = meanNEWLINE mask2 = numpy.logical_and(self.image > upper_limit, mask)NEWLINE size2 = mask2.sum()NEWLINE # length of the arc:NEWLINE points = ms.find_pixels(tth[i])NEWLINE seeds = set((i[0], i[1]) for i in points if mask2[i[0], i[1]])NEWLINE # max number of points: 360 points for a full circleNEWLINE azimuthal = chia[points[:, 0].clip(0, shape[0]), points[:, 1].clip(0, shape[1])]NEWLINE nb_deg_azim = numpy.unique(numpy.rad2deg(azimuthal).round()).sizeNEWLINE keep = int(nb_deg_azim * pts_per_deg)NEWLINE if keep == 0:NEWLINE continueNEWLINE dist_min = len(seeds) / 2.0 / keepNEWLINE # why 3.0, why not ?NEWLINENEWLINE logger.info("Extracting datapoint for ring %s (2theta = %.2f deg); " +NEWLINE "searching for %i pts out of %i with I>%.1f, dmin=%.1f",NEWLINE i, numpy.degrees(tth[i]), keep, size2, upper_limit, dist_min)NEWLINE res = self.massif.peaks_from_area(mask2, Imin=Imin, keep=keep, dmin=dist_min, seed=seeds, ring=i)NEWLINE cp.append(res, i)NEWLINE self.control_points = cpNEWLINE self.geometry_refinement.data = numpy.asarray(cp.getList(), dtype=numpy.float64)NEWLINE return cpNEWLINENEWLINE def get_ai(self):NEWLINE """Create a new azimuthal integrator to be used.NEWLINENEWLINE :return: Azimuthal Integrator instanceNEWLINE """NEWLINE geo = self.geometry_refinement.getPyFAI()NEWLINE geo["detector"] = self.detectorNEWLINE return AzimuthalIntegrator(**geo)NEWLINENEWLINENEWLINEclass GoniometerRefinement(Goniometer):NEWLINE """This class allow the translation of a goniometer geometry into a pyFAINEWLINE geometry using a set of parameter to refine.NEWLINE """NEWLINE def __init__(self, param, pos_function, trans_function,NEWLINE detector="Detector", wavelength=None, param_names=None, pos_names=None,NEWLINE bounds=None):NEWLINE """Constructor of the GoniometerRefinement classNEWLINENEWLINE :param param: vector of parameter to refine for defining the detectorNEWLINE position on the goniometerNEWLINE :param pos_function: a function taking metadata and extracting theNEWLINE goniometer positionNEWLINE :param trans_function: function taking the parameters of theNEWLINE goniometer and the gonopmeter position and return theNEWLINE 6/7 parameters [dist, poni1, poni2, rot1, rot2, rot3, wavelength]NEWLINE :param detector: detector mounted on the moving armNEWLINE :param wavelength: the wavelength used for the experimentNEWLINE :param param_names: list of names to "label" the param vector.NEWLINE :param pos_names: list of names to "label" the position vector of theNEWLINE gonio.NEWLINE :param bounds: list of 2-tuple with the lower and upper bound of each functionNEWLINE """NEWLINE Goniometer.__init__(self, param, trans_function,NEWLINE detector=detector, wavelength=wavelength,NEWLINE param_names=param_names, pos_names=pos_names)NEWLINE self.single_geometries = OrderedDict() # a dict of labels: SingleGeometryNEWLINE if bounds is None:NEWLINE self.bounds = [(None, None)] * len(self.param)NEWLINE else:NEWLINE if isinstance(bounds, dict) and "_fields" in dir(self.nt_param):NEWLINE self.bounds = [bounds.get(i, (None, None))NEWLINE for i in self.nt_param._fields]NEWLINE else:NEWLINE self.bounds = list(bounds)NEWLINE self.pos_function = pos_functionNEWLINE self.fit_wavelength = "wavelength" in self.trans_function.codesNEWLINENEWLINE def new_geometry(self, label, image=None, metadata=None, 
control_points=None,NEWLINE calibrant=None, geometry=None):NEWLINE """Add a new geometry for calibrationNEWLINENEWLINE :param label: usually a stringNEWLINE :param image: 2D numpy array with the Debye scherrer ringsNEWLINE :param metadata: some metadataNEWLINE :param control_points: an instance of ControlPointsNEWLINE :param calibrant: the calibrant used for calibratingNEWLINE :param geometry: poni or AzimuthalIntegrator instance.NEWLINE """NEWLINE if geometry is None:NEWLINE geometry = self.get_ai(self.pos_function(metadata))NEWLINE sg = SingleGeometry(label=label,NEWLINE image=image,NEWLINE metadata=metadata,NEWLINE control_points=control_points,NEWLINE calibrant=calibrant,NEWLINE detector=self.detector,NEWLINE pos_function=self.pos_function,NEWLINE geometry=geometry)NEWLINE self.single_geometries[label] = sgNEWLINE return sgNEWLINENEWLINE def __repr__(self):NEWLINE name = self.__class__.__name__NEWLINE count = len(self.single_geometries)NEWLINE geometry_list = ", ".join(self.single_geometries.keys())NEWLINE return "%s with %i geometries labeled: %s." % (name, count, geometry_list)NEWLINENEWLINE def residu2(self, param):NEWLINE "Actually performs the calulation of the average of the error squared"NEWLINE sumsquare = 0.0NEWLINE npt = 0NEWLINE for single in self.single_geometries.values():NEWLINE motor_pos = single.get_position()NEWLINE single_param = self.trans_function(param, motor_pos)._asdict()NEWLINE pyFAI_param = [single_param.get(name, 0.0)NEWLINE for name in ["dist", "poni1", "poni2", "rot1", "rot2", "rot3"]]NEWLINE pyFAI_param.append(single_param.get("wavelength", self.wavelength) * 1e10)NEWLINE if (single.geometry_refinement is not None) and (len(single.geometry_refinement.data) >= 1):NEWLINE sumsquare += single.geometry_refinement.chi2_wavelength(pyFAI_param)NEWLINE npt += single.geometry_refinement.data.shape[0]NEWLINE return sumsquare / max(npt, 1)NEWLINENEWLINE def chi2(self, param=None):NEWLINE """Calculate the average of the square of the error for a given parameter setNEWLINE """NEWLINE if param is not None:NEWLINE return self.residu2(param)NEWLINE else:NEWLINE return self.residu2(self.param)NEWLINENEWLINE def refine2(self, method="slsqp", **options):NEWLINE """Geometry refinement toolNEWLINENEWLINE See https://docs.scipy.org/doc/scipy-0.18.1/reference/generated/scipy.optimize.minimize.htmlNEWLINENEWLINE :param method: name of the minimizerNEWLINE :param options: options for the minimizerNEWLINE """NEWLINE if method.lower() in ["simplex", "nelder-mead"]:NEWLINE method = "Nelder-Mead"NEWLINE bounds = NoneNEWLINE else:NEWLINE bounds = self.boundsNEWLINE former_error = self.chi2()NEWLINE print("Cost function before refinement: %s" % former_error)NEWLINE param = numpy.asarray(self.param, dtype=numpy.float64)NEWLINE print(param)NEWLINE res = minimize(self.residu2, param, method=method,NEWLINE bounds=bounds, tol=1e-12,NEWLINE options=options)NEWLINE print(res)NEWLINE newparam = res.xNEWLINE new_error = res.funNEWLINE print("Cost function after refinement: %s" % new_error)NEWLINE print(self.nt_param(*newparam))NEWLINENEWLINE # print("Constrained Least square %s --> %s" % (former_error, new_error))NEWLINE if new_error < former_error:NEWLINE # print(param, newparam)NEWLINENEWLINE i = abs(param - newparam).argmax()NEWLINE if "_fields" in dir(self.nt_param):NEWLINE name = self.nt_param._fields[i]NEWLINE print("maxdelta on: %s (%i) %s --> %s" % (name, i, self.param[i], newparam[i]))NEWLINE else:NEWLINE print("maxdelta on: %i %s --> %s" % (i, self.param[i], newparam[i]))NEWLINE 
            self.param = newparam
            # update wavelength after successful optimization: not easy
            # if self.fit_wavelength:
            #     self.wavelength = self.
        elif self.fit_wavelength:
            print("Restore wavelength and former parameters")
            former_wavelength = self.wavelength
            for sg in self.single_geometries.values():
                sg.calibrant.setWavelength_change2th(former_wavelength)
            print(self.nt_param(*self.param))
        return self.param

    def set_bounds(self, name, mini=None, maxi=None):
        """Redefines the bounds for the refinement

        :param name: name of the parameter or index in the parameter set
        :param mini: minimum value
        :param maxi: maximum value
        """
        if isinstance(name, StringTypes) and "_fields" in dir(self.nt_param):
            idx = self.nt_param._fields.index(name)
        else:
            idx = int(name)
        self.bounds[idx] = (mini, maxi)

    @classmethod
    def sload(cls, filename, pos_function=None):
        """Class method for instantiating a GoniometerRefinement object from a JSON file

        :param filename: name of the JSON file
        :param pos_function: a function taking metadata and extracting the
            goniometer position
        :return: GoniometerRefinement object
        """

        with open(filename) as f:
            dico = json.load(f)
        assert dico["content"] == cls.file_version, \
            "JSON file does not contain a goniometer calibration"
        assert "trans_function" in dico, "No translation function defined in JSON file"
        detector = cls._get_detector_from_dict(dico)
        transfun = dico.get("trans_function", {})
        if "content" in transfun:
            content = transfun.pop("content")
            # May be adapted for other classes of GeometryTransformation functions
            if content in ("GeometryTranslation", "GeometryTransformation"):
                funct = GeometryTransformation(**transfun)
            elif content == "ExtendedTransformation":
                funct = ExtendedTransformation(**transfun)
            else:
                raise RuntimeError("content=%s, not in (GeometryTranslation, GeometryTransformation, ExtendedTransformation)" % content)
        else:  # assume GeometryTransformation
            funct = GeometryTransformation(**transfun)

        gonio = cls(param=dico.get("param", []),
                    trans_function=funct,
                    pos_function=pos_function,
                    detector=detector,
                    wavelength=dico.get("wavelength"))
        return gonio
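

# Illustrative end-to-end sketch (not part of the upstream module). The
# detector name, the linear poni1 model, the metadata layout and the initial
# parameter guesses are assumptions made for this example; `calibrant` is
# expected to be a pyFAI.calibrant.Calibrant instance and `images_by_angle`
# a mapping of motor angle to 2d ring image. Requires *numexpr*.
def _example_refinement_workflow(images_by_angle, calibrant):
    trans = GeometryTransformation(dist_expr="dist",
                                   poni1_expr="poni1_offset + scale * pos",
                                   poni2_expr="poni2",
                                   rot1_expr="rot1",
                                   rot2_expr="rot2",
                                   rot3_expr="rot3",
                                   param_names=["dist", "poni1_offset", "scale",
                                                "poni2", "rot1", "rot2", "rot3"])
    gonio_ref = GoniometerRefinement(param=[1.0, 0.1, 0.001, 0.05, 0.0, 0.0, 0.0],
                                     pos_function=lambda metadata: metadata["angle"],
                                     trans_function=trans,
                                     detector="Pilatus1M",
                                     wavelength=1e-10)
    for angle, image in images_by_angle.items():
        sg = gonio_ref.new_geometry(label="pos_%s" % angle,
                                    image=image,
                                    metadata={"angle": angle},
                                    calibrant=calibrant)
        # Pick control points on the first few Debye-Scherrer rings.
        sg.extract_cp(max_rings=5)
    # Least-squares refinement of the goniometer model parameters.
    gonio_ref.refine2()
    return gonio_ref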
from __future__ import annotations # type: ignore[attr-defined]NEWLINEfrom dataclasses import dataclass, fieldNEWLINEfrom enum import EnumNEWLINEfrom typing import (NEWLINE Callable,NEWLINE Dict,NEWLINE List,NEWLINE Optional,NEWLINE UnionNEWLINE)NEWLINEimport weakrefNEWLINENEWLINEimport copyNEWLINEimport threadingNEWLINEimport torchNEWLINEimport torch.distributed as distNEWLINEfrom torch.distributed import rpcNEWLINEfrom torch.distributed import distributed_c10dNEWLINEfrom torch.distributed._shard.sharding_spec import (NEWLINE ChunkShardingSpec,NEWLINE EnumerableShardingSpec,NEWLINE ShardMetadata,NEWLINE ShardingSpec,NEWLINE)NEWLINEfrom torch.distributed._shard.sharding_spec._internals import (NEWLINE check_tensor,NEWLINE get_split_size,NEWLINE get_chunked_dim_size,NEWLINE validate_non_overlapping_shards_metadata,NEWLINE)NEWLINEfrom torch.distributed.nn.functional import (NEWLINE reduce_scatter,NEWLINE)NEWLINEfrom torch.types import NumberNEWLINEfrom .metadata import TensorProperties, ShardedTensorMetadataNEWLINEfrom .shard import ShardNEWLINEfrom .reshard import reshuffle_local_shard, reshard_local_shardNEWLINEfrom .utils import (NEWLINE get_current_process_group,NEWLINE _flatten_tensor_size,NEWLINE _parse_and_validate_remote_device,NEWLINE _validate_output_tensor_for_gather,NEWLINE build_metadata_from_local_shards,NEWLINE build_global_metadataNEWLINE)NEWLINENEWLINE# Tracking for sharded tensor objects.NEWLINE_sharded_tensor_lock = threading.Lock()NEWLINE_sharded_tensor_current_id = 0NEWLINE_sharded_tensor_map: Dict[int, 'weakref.ReferenceType[ShardedTensor]'] = {}NEWLINENEWLINE# Custom sharded opsNEWLINE_SHARDED_OPS: Dict[str, Callable] = {}NEWLINEdef _register_sharded_op(op, func):NEWLINE from inspect import signatureNEWLINE if len(signature(func).parameters) != 4:NEWLINE raise TypeError(NEWLINE f'Custom sharded op function expects signature: 'NEWLINE f'(types, args, kwargs, process_group), but received 'NEWLINE f'signature: {signature(func)}')NEWLINENEWLINE global _SHARDED_OPSNEWLINE _SHARDED_OPS[op] = funcNEWLINENEWLINEdef _register_remote_shards(sharded_tensor_id: int, rrefs: List[rpc.RRef[Shard]], rpc_rank: int):NEWLINE with _sharded_tensor_lock:NEWLINE if sharded_tensor_id not in _sharded_tensor_map:NEWLINE raise RuntimeError(NEWLINE f'Could not find sharded_tensor_id: {sharded_tensor_id} in map: {_sharded_tensor_map.keys()}')NEWLINENEWLINE sharded_tensor = _sharded_tensor_map[sharded_tensor_id]()NEWLINE if sharded_tensor is None:NEWLINE raise RuntimeError('ShardedTensor weakref has been deallocated')NEWLINE else:NEWLINE sharded_tensor._register_remote_shards(rrefs, rpc_rank)NEWLINENEWLINENEWLINEclass CreateOp(Enum):NEWLINE EMPTY = 0NEWLINE FULL = 1NEWLINE ONES = 2NEWLINE RAND = 3NEWLINE ZEROS = 4NEWLINENEWLINENEWLINE@dataclassNEWLINEclass TensorInitParams(object):NEWLINE """ Container for list of common params to create new local tensor. 
"""NEWLINENEWLINE create_op: CreateOpNEWLINENEWLINE # needed when create_op is FULLNEWLINE # default set to False (not None) since None is incompatible with Number.NEWLINE fill_value: Number = field(default=False)NEWLINENEWLINE tensor_properties: TensorProperties = field(NEWLINE default=TensorProperties(dtype=torch.get_default_dtype(),NEWLINE layout=torch.strided,NEWLINE requires_grad=False,NEWLINE memory_format=torch.contiguous_format,NEWLINE pin_memory=False))NEWLINENEWLINENEWLINENEWLINEclass ShardedTensor(object):NEWLINE """NEWLINE ShardedTensor is an abstraction to represent Tensors that are shardedNEWLINE across multiple devices and multiple processes.NEWLINENEWLINE ShardedTensor is initialized in an SPMD like fashion where each rankNEWLINE initializes the ShardedTensor. The ShardedTensor object on each rankNEWLINE then only stores the local shard for the Tensor and provides globalNEWLINE metadata for all the shards.NEWLINENEWLINE ShardedTensor doesn't provide any Tensor like operations but is a wrapperNEWLINE providing the Tensor representing the local shard and the global metadata.NEWLINE Using these, users can build their custom distributed._sharded computationsNEWLINE on top of this primitive. The local shards are all initialized using theNEWLINE create_op specified by tensor_init_params.create_op, e.g., torch.ones, orNEWLINE torch.emptyNEWLINENEWLINE Args:NEWLINE sharding_spec (:class:`torch.distributed._shard.sharding_spec.ShardingSpec`): The specificationNEWLINE describing how to shard the Tensor.NEWLINE size (int...): a sequence of integers defining the shape of the outputNEWLINE tensor. Can be a variable number of arguments or a collection like a list or tuple.NEWLINENEWLINE Keyword args:NEWLINE tensor_init_params (:class: `TensorInitParams`): common params to create tensor.NEWLINE init_rrefs (bool, optional): Whether or not to initializeNEWLINE :class:`torch.distributed.rpc.RRef`s pointing to remote shards.NEWLINE Need to initialize the RPC Framework if specified as ``True``.NEWLINE Default: ``False``.NEWLINENEWLINE .. note:: ShardedTensor uses collectives to do various operations, i.e. itNEWLINE uses all_gather to do cross rank validations. For NCCL-based processedNEWLINE groups, internal tensor representations of objects must be moved to theNEWLINE GPU device before communication takes place. 
In this case, the deviceNEWLINE used is given by ``torch.cuda.current_device()`` and it is the user'sNEWLINE responsiblity to ensure that this is set so that each rank has anNEWLINE individual GPU, via ``torch.cuda.set_device()``NEWLINENEWLINE """NEWLINENEWLINE def __new__(cls, *args, **kwargs):NEWLINE # Use __new__ for logging purposes.NEWLINE torch._C._log_api_usage_once("torch.distributed._shard.sharded_tensor")NEWLINE return super(ShardedTensor, cls).__new__(cls)NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE sharding_spec: ShardingSpec,NEWLINE *size,NEWLINE tensor_init_params: TensorInitParams,NEWLINE process_group=None,NEWLINE init_rrefs=False,NEWLINE ):NEWLINE # prepare initialization, initialize fields likeNEWLINE # _process_group, _local_shards, etc.NEWLINE self._prepare_init(process_group=process_group, init_rrefs=init_rrefs)NEWLINENEWLINE if tensor_init_params.tensor_properties is None:NEWLINE raise ValueError('tensor_properties must not be None.')NEWLINENEWLINE if tensor_init_params.tensor_properties.dtype is None:NEWLINE tensor_init_params.tensor_properties.dtype = torch.get_default_dtype()NEWLINENEWLINE if tensor_init_params.tensor_properties.layout != torch.strided:NEWLINE raise ValueError('Only torch.strided layout is currently supported')NEWLINENEWLINE if tensor_init_params.tensor_properties.memory_format != torch.contiguous_format:NEWLINE raise ValueError('Only torch.contiguous_format memory_format is currently supported')NEWLINENEWLINE dims = _flatten_tensor_size(size)NEWLINENEWLINE self._sharding_spec = sharding_specNEWLINENEWLINE if isinstance(self._sharding_spec, ChunkShardingSpec):NEWLINE self._init_chunked(dims, tensor_init_params)NEWLINE elif isinstance(self._sharding_spec, EnumerableShardingSpec):NEWLINE self._init_enumerable(dims, tensor_init_params)NEWLINE else:NEWLINE raise ValueError(f'Unsupported sharding_spec: {self._sharding_spec}')NEWLINENEWLINE # do post initialization (i.e. 
register sharded_tensor_id, initialize_rpc)NEWLINE self._post_init()NEWLINENEWLINE def _prepare_init(self, process_group=None, init_rrefs=False):NEWLINE self._init_rrefs = init_rrefsNEWLINE self._sharded_tensor_id = NoneNEWLINENEWLINE self._process_group = (NEWLINE process_groupNEWLINE if process_group is not NoneNEWLINE else distributed_c10d._get_default_group()NEWLINE )NEWLINENEWLINE self._local_shards: List[Shard] = []NEWLINE self._remote_shards: Dict[int, List[rpc.RRef[Shard]]] = {}NEWLINENEWLINE def _post_init(self):NEWLINE # Initialize RPC if available.NEWLINE if self._init_rrefs:NEWLINE with _sharded_tensor_lock:NEWLINE global _sharded_tensor_current_id, _sharded_tensor_mapNEWLINE self._sharded_tensor_id = _sharded_tensor_current_idNEWLINE _sharded_tensor_map[self._sharded_tensor_id] = weakref.ref(self)NEWLINE _sharded_tensor_current_id += 1NEWLINENEWLINE if not rpc._is_current_rpc_agent_set():NEWLINE raise RuntimeError(NEWLINE 'RPC Framework needs to be initialized using'NEWLINE ' torch.distributed.rpc.init_rpc if init_rrefs is set to True')NEWLINE self._init_rpc()NEWLINENEWLINE def __del__(self):NEWLINE # Clean up the global map.NEWLINE with _sharded_tensor_lock:NEWLINE global _sharded_tensor_current_id, _sharded_tensor_mapNEWLINE if self._sharded_tensor_id in _sharded_tensor_map:NEWLINE _sharded_tensor_map.pop(self._sharded_tensor_id) # type: ignore[call-overload]NEWLINENEWLINE def _init_rpc(self):NEWLINE # Validate PG and RPC ranks match.NEWLINE pg_rank = dist.get_rank()NEWLINE rpc_rank = rpc.get_worker_info().idNEWLINE if pg_rank != rpc_rank:NEWLINE raise ValueError(NEWLINE f'Default ProcessGroup and RPC ranks must be 'NEWLINE f'the same for ShardedTensor, found process group rank: 'NEWLINE f'{pg_rank} and RPC rank: {rpc_rank}'NEWLINE )NEWLINENEWLINE self._remote_shards = {}NEWLINENEWLINE # Gather all the sharded tensor ids.NEWLINE worker_infos = rpc._get_current_rpc_agent().get_worker_infos()NEWLINE rank_to_name = {}NEWLINE name_to_rank = {}NEWLINENEWLINE for worker_info in worker_infos:NEWLINE rank_to_name[worker_info.id] = worker_info.nameNEWLINE name_to_rank[worker_info.name] = worker_info.idNEWLINENEWLINE all_tensor_ids = rpc.api._all_gather(self._sharded_tensor_id)NEWLINENEWLINE # Share the local shards to the entire world.NEWLINE futs = []NEWLINE rpc_rank = rpc.get_worker_info().idNEWLINE for rank in range(dist.get_world_size()):NEWLINE # Skip self.NEWLINE if rank == dist.get_rank():NEWLINE continueNEWLINENEWLINE if len(self.local_shards()) != 0:NEWLINE rrefs: List[rpc.RRef[Shard]] = [rpc.RRef(shard) for shard in self.local_shards()]NEWLINE fut = rpc.rpc_async(NEWLINE rank,NEWLINE _register_remote_shards,NEWLINE args=(all_tensor_ids[rank_to_name[rank]], rrefs, rpc_rank))NEWLINE futs.append(fut)NEWLINENEWLINE torch.futures.wait_all(futs)NEWLINENEWLINE # Barrier for all RPCs to finish on all ranks.NEWLINE rpc.api._all_gather(None)NEWLINENEWLINE def gather(NEWLINE self,NEWLINE dst: int = 0,NEWLINE out: Optional[torch.Tensor] = None,NEWLINE ) -> None:NEWLINE """NEWLINE Creates a full :class:`Tensor` on rank ``dst`` by gathering all shards of theNEWLINE sharded tensor.NEWLINENEWLINE The API needs to be called on all ranks in SPMD fashion. All ranks should haveNEWLINE the same ``dst``. 
``out`` should be a tensor of the same size as the overall
        size of the sharded tensor on ``dst`` and ``None`` on all other ranks.

        Args:
            dst(int): The rank where the full tensor is constructed.
                Default: 0
            out (:class:`torch.Tensor`, optional): The output full tensor.
                Must be provided ONLY on the ``dst`` rank.
                Default: ``None``
        """
        rank = dist.get_rank(self._process_group)
        full_size = self.metadata().size
        _validate_output_tensor_for_gather(rank, dst, full_size, out)

        local_shards = self.local_shards()

        world_size = dist.get_world_size(self._process_group)

        gathered_shards = [None] * world_size
        # will revise this part with CPU support and use dist.gather()
        # once NCCL support for gather() is ready
        # https://github.com/pytorch/pytorch/issues/66187
        dist.all_gather_object(
            obj=local_shards,
            object_list=gathered_shards,
            group=self._process_group,
        )

        if rank == dst:
            dims = len(full_size)
            for shards in gathered_shards:
                if shards is None:
                    raise RuntimeError(
                        f'Gathered shards cannot be None on dst rank {dst}'
                    )
                for shard in shards:
                    metadata = shard.metadata
                    tensor = shard.tensor

                    out_narrow_view = out
                    for dim in range(dims):
                        out_narrow_view = out_narrow_view.narrow(
                            dim,
                            metadata.shard_offsets[dim],
                            metadata.shard_sizes[dim],
                        )

                    out_narrow_view.copy_(tensor)
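
    # Illustrative usage sketch (comments only; the tensor size and the
    # rank-0 destination below are assumptions, not part of this module):
    #
    #     # On every rank of an initialized process group, SPMD style:
    #     st = ...  # some ShardedTensor with overall size (4, 4)
    #     out = torch.zeros(4, 4) if dist.get_rank() == 0 else None
    #     st.gather(dst=0, out=out)  # rank 0 now holds the full tensor in `out`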
    @classmethod
    def _init_from_local_shards(
        cls,
        local_shards: List[Shard],
        *global_size,
        process_group=None,
        init_rrefs=False,
    ):
        # STEP 1: Validate the ShardMetadatas locally
        process_group = (
            process_group
            if process_group is not None
            else distributed_c10d._get_default_group()
        )
        current_rank = dist.get_rank(process_group)
        world_size = dist.get_world_size(process_group)

        local_sharded_tensor_metadata: Optional[ShardedTensorMetadata] = None
        global_tensor_size = _flatten_tensor_size(global_size)

        if len(local_shards) > 0:
            local_sharded_tensor_metadata = \
                build_metadata_from_local_shards(local_shards, global_tensor_size, current_rank, process_group)

        # STEP 2: Validate metadata across ranks, and build a global sharded tensor
        # metadata by gathering local ShardedTensorMetadata
        gathered_metadatas: List[Optional[ShardedTensorMetadata]] = []
        if world_size > 1:
            gathered_metadatas = [None for _ in range(world_size)]

            dist.all_gather_object(
                gathered_metadatas,
                local_sharded_tensor_metadata,
                group=process_group
            )
        else:
            gathered_metadatas = [local_sharded_tensor_metadata]

        global_sharded_tensor_metadata = build_global_metadata(gathered_metadatas)

        # STEP 3: Validation done, create the actual ShardedTensor and populate fields
        # prepare initialization
        sharded_tensor = cls.__new__(cls)
        sharded_tensor._prepare_init(process_group=process_group, init_rrefs=init_rrefs)

        # add to metadata and local_shards
        sharded_tensor._metadata = global_sharded_tensor_metadata
        sharded_tensor._local_shards = local_shards
        # make an EnumerableShardingSpec for sharded tensors initialized from this API.
        # TODO: make sharding spec a ChunkShardingSpec by inferring from the metadata list.
        # see issue https://github.com/pytorch/pytorch/issues/67244
        sharded_tensor._sharding_spec = EnumerableShardingSpec(global_sharded_tensor_metadata.shards_metadata)

        # run post initialization, i.e. map registration, rpc initialization
        sharded_tensor._post_init()
        return sharded_tensor

    @classmethod
    def _init_from_local_shards_and_global_metadata(
        cls,
        local_shards: List[Shard],
        sharded_tensor_metadata: ShardedTensorMetadata,
        process_group=None,
        init_rrefs=False,
    ) -> "ShardedTensor":
        """
        Initialize a ShardedTensor with local shards and a global
        ShardedTensorMetadata built on each rank.

        Warning: This API is experimental and subject to change. 
It doesNEWLINE not do cross rank validations, and fully rely on the userNEWLINE for the correctness of sharded_tensor_metadata on each rankNEWLINE """NEWLINE process_group = (NEWLINE process_groupNEWLINE if process_group is not NoneNEWLINE else distributed_c10d._get_default_group()NEWLINE )NEWLINE current_rank = dist.get_rank(process_group)NEWLINENEWLINE shards_metadata = sharded_tensor_metadata.shards_metadataNEWLINE tensor_properties = sharded_tensor_metadata.tensor_propertiesNEWLINENEWLINE if len(shards_metadata) == 0:NEWLINE raise ValueError("shards_metadata must not be empty!")NEWLINENEWLINE if tensor_properties.layout != torch.strided:NEWLINE raise ValueError('Only torch.strided layout is currently supported')NEWLINENEWLINE sharded_tensor = cls.__new__(cls)NEWLINE sharded_tensor._prepare_init(process_group=process_group, init_rrefs=init_rrefs)NEWLINENEWLINE sharded_tensor._metadata = sharded_tensor_metadataNEWLINENEWLINE local_shard_metadatas = []NEWLINENEWLINE def _raise_if_mismatch(expected, actual, prop_name, rank, is_property=False):NEWLINE tensor_property_or_metadata = "tensor property" if is_property else "local ShardMetadata"NEWLINE if expected != actual:NEWLINE raise ValueError(f"Local shards' tensor {prop_name} property is incompatible with "NEWLINE f"{tensor_property_or_metadata} on rank {rank}: "NEWLINE f"{tensor_property_or_metadata} {prop_name}={expected}, "NEWLINE f"local shard tensor {prop_name}={actual}.")NEWLINENEWLINE # collect local shard metadatas from the global sharded_tensor_metadataNEWLINE for shard_metadata in shards_metadata: # type: ignore[attr-defined]NEWLINE rank, local_device = _parse_and_validate_remote_device(sharded_tensor._process_group, shard_metadata.placement)NEWLINENEWLINE if current_rank == rank:NEWLINE local_shard_metadatas.append(shard_metadata)NEWLINENEWLINE if len(local_shards) != len(local_shard_metadatas):NEWLINE raise RuntimeError(NEWLINE f'Number of local shards ({len(local_shards)}) does not match number of local 'NEWLINE f'shards metadata in sharded_tensor_metadata ({len(local_shard_metadatas)}) 'NEWLINE f'on rank ({current_rank}) 'NEWLINE )NEWLINENEWLINE for shard in local_shards:NEWLINE shard_meta = shard.metadataNEWLINE local_shard_tensor = shard.tensorNEWLINE rank, local_device = _parse_and_validate_remote_device(sharded_tensor._process_group, shard_meta.placement)NEWLINENEWLINE # validate if shard_meta in the metadatas collected from sharded_tensor_metadataNEWLINE assert shard_meta in local_shard_metadatas, \NEWLINE "local shard metadata not in sharded_tensor_metadata!"NEWLINENEWLINE _raise_if_mismatch(tensor_properties.layout, local_shard_tensor.layout, "layout", current_rank, True)NEWLINE if not local_shard_tensor.is_contiguous():NEWLINE raise ValueError('Only torch.contiguous_format memory_format is currently supported')NEWLINENEWLINE _raise_if_mismatch(shard_meta.shard_sizes, list(local_shard_tensor.size()), "size", current_rank)NEWLINE _raise_if_mismatch(tensor_properties.pin_memory, local_shard_tensor.is_pinned(), "pin_memory", current_rank, True)NEWLINE _raise_if_mismatch(local_device, local_shard_tensor.device, "device", current_rank)NEWLINE _raise_if_mismatch(tensor_properties.dtype, local_shard_tensor.dtype, "dtype", current_rank, True)NEWLINE _raise_if_mismatch(NEWLINE tensor_properties.requires_grad, local_shard_tensor.requires_grad, "requires_grad", current_rank, True)NEWLINENEWLINE # check if shards_metadata have overlap shardsNEWLINE validate_non_overlapping_shards_metadata(shards_metadata)NEWLINENEWLINE # check if 
the shards_metadata is compatible with overall size of the sharded tensor.NEWLINE check_tensor(shards_metadata, list(sharded_tensor_metadata.size))NEWLINENEWLINE # done validation, add local_shardsNEWLINE sharded_tensor._local_shards = local_shardsNEWLINE # make a EnumerableShardingSpec for sharded tensors that initialized from this API.NEWLINE # TODO: make sharding spec a ChunkShardingSpec by inferring from the metadata list.NEWLINE # see issue https://github.com/pytorch/pytorch/issues/67244NEWLINE sharded_tensor._sharding_spec = EnumerableShardingSpec(shards_metadata)NEWLINENEWLINE # run post initialization, i.e. map registration, rpc initializationNEWLINE sharded_tensor._post_init()NEWLINE return sharded_tensorNEWLINENEWLINENEWLINE def _init_chunked(self, dims, tensor_init_params: TensorInitParams, ):NEWLINE current_rank = dist.get_rank(self._process_group)NEWLINE sharding_dim = self._sharding_spec.dim # type: ignore[attr-defined]NEWLINENEWLINE # Validate the sharding spec.NEWLINE if not isinstance(sharding_dim, int):NEWLINE raise ValueError(NEWLINE f"Sharding dim needs to be an integer, found: {sharding_dim}"NEWLINE )NEWLINE if sharding_dim >= len(dims) or sharding_dim < -len(dims):NEWLINE raise ValueError(f"Invalid sharding dim: {sharding_dim}")NEWLINENEWLINE dim_size = dims[sharding_dim]NEWLINE remote_devices = self._sharding_spec.placements # type: ignore[attr-defined]NEWLINE chunks = len(remote_devices)NEWLINE # split_size computed similar to 'torch.chunk'NEWLINE split_size = get_split_size(dim_size, chunks)NEWLINENEWLINE shards_metadata = []NEWLINE for idx, remote_device in enumerate(remote_devices):NEWLINE rank, local_device = _parse_and_validate_remote_device(self._process_group, remote_device)NEWLINENEWLINE # Adjust the sharding dim for this rank.NEWLINE sharded_dim_size = get_chunked_dim_size(dim_size, split_size, idx)NEWLINENEWLINE if sharded_dim_size > 0:NEWLINE # Build sharding_metadata.NEWLINENEWLINE # deepcopy for modification.NEWLINE rank_dims = dims.copy()NEWLINENEWLINE rank_offsets = [0] * len(dims)NEWLINE rank_offsets[sharding_dim] = split_size * idxNEWLINE rank_dims[sharding_dim] = sharded_dim_sizeNEWLINENEWLINE shard_metadata = ShardMetadata(rank_offsets, rank_dims, remote_device)NEWLINE shards_metadata.append(shard_metadata)NEWLINENEWLINE # Build the local shard for the current rank if it is involved in the sharding spec.NEWLINE if current_rank == rank:NEWLINE # Initialize the local shard.NEWLINE local_shard = _create_tensor_from_params(NEWLINE *rank_dims, local_device=local_device, tensor_init_params=tensor_init_params)NEWLINE self._local_shards.append(Shard(local_shard, shard_metadata))NEWLINENEWLINE # Build overall metadataNEWLINE self._metadata = ShardedTensorMetadata(NEWLINE shards_metadata, dims, tensor_init_params.tensor_properties, )NEWLINENEWLINE def _init_enumerable(self, dims, tensor_init_params: TensorInitParams):NEWLINE # Validate the sharding spec is compatible with the tensor.NEWLINE check_tensor(self._sharding_spec.shards, dims) # type: ignore[attr-defined]NEWLINENEWLINE current_rank = dist.get_rank(self._process_group)NEWLINENEWLINE shards_metadata = []NEWLINE for shard_metadata in self._sharding_spec.shards: # type: ignore[attr-defined]NEWLINE rank, local_device = _parse_and_validate_remote_device(self._process_group, shard_metadata.placement)NEWLINE shards_metadata.append(shard_metadata)NEWLINENEWLINE if current_rank == rank:NEWLINE # Initialize the local shard.NEWLINE local_shard = _create_tensor_from_params(NEWLINE *shard_metadata.shard_sizes, 
local_device=local_device,NEWLINE tensor_init_params=tensor_init_params)NEWLINE self._local_shards.append(Shard(local_shard, shard_metadata))NEWLINENEWLINE # Build overall metadataNEWLINE self._metadata = ShardedTensorMetadata(NEWLINE shards_metadata, dims, tensor_init_params.tensor_properties, )NEWLINENEWLINE def sharding_spec(self) -> ShardingSpec:NEWLINE """NEWLINE Returns the ShardingSpec for the tensor.NEWLINE """NEWLINE return self._sharding_specNEWLINENEWLINE def reshard(self, resharding_spec: ShardingSpec) -> ShardedTensor:NEWLINE """NEWLINE Reshard a sharded tensor given the ``resharding_spec``. For now, we only supportNEWLINE single local shard.NEWLINENEWLINE If ``resharding_spec`` is same as the original one, this becomes a no-op.NEWLINE If only ``resharding_spec`` shares the same sharding dim with the original one,NEWLINE we swap local shards directly.NEWLINE For more generic cases, we merge different shards across different ranks and splitNEWLINE the local shards based on the ``resharding_spec`` via `all_to_all` collective API.NEWLINENEWLINE Args:NEWLINE resharding_spec (:class:`torch.distributed._shard.sharding_spec.ShardingSpec`): TheNEWLINE specification describing how the tensor is sharded.NEWLINENEWLINE Returns:NEWLINE A :class:`ShardedTensor` object whose local shards are resharded.NEWLINENEWLINE Examples:NEWLINE >>> # We have 2 process groups, 2 ranks.NEWLINE >>> tensor = torch.arange(4, dtype=torch.int64) + 1 + 2 * rankNEWLINE >>> tensor = torch.stack([tensor, tensor])NEWLINE >>> tensorNEWLINE tensor([[1, 2, 3, 4], [1, 2, 3, 4]]) # Rank 0NEWLINE tensor([[3, 4, 5, 6], [3, 4, 5, 6]]) # Rank 1NEWLINE tensor([[5, 6, 7, 8], [5, 6, 7, 8]]) # Rank 2NEWLINE tensor([[7, 8, 9, 10], [7, 8, 9, 10]]) # Rank 3NEWLINE >>> sharding_dim = 0NEWLINE >>> spec = ChunkShardingSpec(NEWLINE dim=sharding_dim,NEWLINE placements=[NEWLINE "rank:0/cuda:0",NEWLINE "rank:1/cuda:1",NEWLINE "rank:2/cuda:2",NEWLINE "rank:3/cuda:3",NEWLINE ],NEWLINE )NEWLINE >>> current_offsets = [0] * 2NEWLINE >>> current_offsets[0] = rank * 2NEWLINE >>> shard_metadata = ShardMetadata(NEWLINE shard_offsets=copy.deepcopy(current_offsets),NEWLINE shard_sizes=tensor.size(),NEWLINE placement=spec.placements[rank],NEWLINE )NEWLINE >>> local_shards = [NEWLINE Shard(NEWLINE tensor=tensor,NEWLINE metadata=shard_metadata,NEWLINE )NEWLINE ]NEWLINE >>> st = ShardedTensor._init_from_local_shards(local_shards, tensor.size())NEWLINE >>> sharding_dim = 1NEWLINE >>> resharding_spec = ChunkShardingSpec(NEWLINE dim=sharding_dim,NEWLINE placements=[NEWLINE "rank:0/cuda:0",NEWLINE "rank:1/cuda:1",NEWLINE "rank:2/cuda:2",NEWLINE "rank:3/cuda:3",NEWLINE ],NEWLINE )NEWLINE >>> st.reshard(resharding_spec)NEWLINE >>> tensor = st.local_shards()[0].tensorNEWLINE >>> tensorNEWLINE tensor([[1], [1], [3], [3], [5], [5], [7], [7]]) # Rank 0NEWLINE tensor([[2], [2], [4], [4], [6], [6], [8], [8]]) # Rank 1NEWLINE tensor([[3], [3], [5], [5], [7], [7], [9], [9]]) # Rank 2NEWLINE tensor([[4], [4], [6], [6], [8], [8], [10], [10]]) # Rank 3NEWLINE """NEWLINE if (NEWLINE not isinstance(resharding_spec, ChunkShardingSpec) orNEWLINE not isinstance(self._sharding_spec, ChunkShardingSpec)NEWLINE ):NEWLINE raise NotImplementedError("Only ChunkShardingSpec supported for reshard.")NEWLINE if (len(self.local_shards()) != 1):NEWLINE raise NotImplementedError("Only single local shard supported for reshard.")NEWLINENEWLINE if self._sharding_spec.dim == resharding_spec.dim: # type: ignore[attr-defined]NEWLINE if self._sharding_spec.placements == 
resharding_spec.placements: # type: ignore[attr-defined]NEWLINE return selfNEWLINE else:NEWLINE local_shards, shards_metadata = reshuffle_local_shard(NEWLINE self.local_tensor(),NEWLINE self.size(), # type: ignore[arg-type]NEWLINE self._sharding_spec,NEWLINE resharding_spec,NEWLINE self._process_group,NEWLINE )NEWLINE else:NEWLINE local_shards, shards_metadata = reshard_local_shard(NEWLINE self.local_tensor(),NEWLINE self.size(), # type: ignore[arg-type]NEWLINE self._sharding_spec,NEWLINE resharding_spec,NEWLINE self._process_group,NEWLINE )NEWLINE self._local_shards = local_shardsNEWLINE self._metadata.shards_metadata = shards_metadataNEWLINE self._sharding_spec = resharding_specNEWLINE return selfNEWLINENEWLINE def local_tensor(self) -> torch.Tensor:NEWLINE """NEWLINE Return the local tensor of a sharded_tensor. For now we only support a single local shard.NEWLINENEWLINE Returns:NEWLINE A :class:`torch.Tensor` of the local shard.NEWLINE """NEWLINE if len(self.local_shards()) != 1:NEWLINE raise NotImplementedError("Only single local shard is supported.")NEWLINE return self.local_shards()[0].tensorNEWLINENEWLINE def __torch_function__(self, func, types, args=(), kwargs=None):NEWLINE if func in _SHARDED_OPS:NEWLINE return _SHARDED_OPS[func](types, args, kwargs, self._process_group)NEWLINE raise RuntimeError(NEWLINE f"torch function '{func.__name__}', with args: {args} and "NEWLINE f"kwargs: {kwargs} not supported for ShardedTensor!")NEWLINENEWLINE def metadata(self) -> ShardedTensorMetadata:NEWLINE """NEWLINE Returns a :class:`ShardedTensorMetadata` object corresponding to theNEWLINE metadata for the entire tensor.NEWLINE """NEWLINE return self._metadataNEWLINENEWLINE def local_shards(self) -> List[Shard]:NEWLINE """NEWLINE Returns a list of :class:`Shard` corresponding to theNEWLINE local shards for this rank. Returns an empty list if the current rankNEWLINE does not host any shards for this Tensor.NEWLINE """NEWLINE return self._local_shardsNEWLINENEWLINE def size(self, dim: int = None) -> Union[torch.Size, int]:NEWLINE """NEWLINE Returns the size of the tensor as a ``torch.Size``, or the size ofNEWLINE a single dimension as an ``int``.NEWLINENEWLINE Args:NEWLINE dim (int, optional): the dimension for which to retrieve the size.NEWLINE If specified, the size of that dimension is returned as an ``int``.NEWLINE If not, the full size is returned as a ``torch.Size`` (a subclass of tuple).NEWLINE Default: ``None``NEWLINENEWLINE Returns:NEWLINE A ``torch.Size`` or ``int`` representing the size of the tensor.NEWLINE """NEWLINE size = self._metadata.sizeNEWLINE if dim is None:NEWLINE return sizeNEWLINE if dim < 0 or dim >= len(size):NEWLINE raise ValueError(NEWLINE f"Argument ``dim`` must be within the range of tensor dimensions [0, {len(size)})"NEWLINE )NEWLINE return size[dim]NEWLINENEWLINENEWLINE def is_pinned(self) -> bool:NEWLINE """NEWLINE Returns True if the sharded tensor (each local shard) resides in pinned memory.NEWLINE """NEWLINE return self._metadata.tensor_properties.pin_memoryNEWLINENEWLINE def is_contiguous(self) -> bool:NEWLINE """NEWLINE Returns True if the sharded tensor (each local shard) is contiguous in memoryNEWLINE in the order specified by memory format.NEWLINE """NEWLINE return self._metadata.tensor_properties.memory_format == torch.contiguous_formatNEWLINENEWLINE @propertyNEWLINE def shape(self):NEWLINE return self._metadata.sizeNEWLINENEWLINE @propertyNEWLINE def requires_grad(self):NEWLINE return self._metadata.tensor_properties.requires_gradNEWLINENEWLINE @propertyNEWLINE def dtype(self):NEWLINE return self._metadata.tensor_properties.dtypeNEWLINENEWLINE @propertyNEWLINE def layout(self):NEWLINE return self._metadata.tensor_properties.layoutNEWLINENEWLINE def _register_remote_shards(self, remote_shards: List[rpc.RRef[Shard]], rpc_rank: int):NEWLINE self._remote_shards[rpc_rank] = remote_shardsNEWLINENEWLINE def remote_shards(self) -> Dict[int, List[rpc.RRef[Shard]]]:NEWLINE """NEWLINE Returns a Dict[int, RRef] with keys being the RPC rank and valuesNEWLINE being RRefs to shards on that rank. 
Need to initialize theNEWLINE RPC framework for this functionality.NEWLINENEWLINE Raises an exception if ShardedTensor was created with ``init_rrefs=False``NEWLINE """NEWLINE if not self._init_rrefs:NEWLINE raise RuntimeError(NEWLINE 'ShardedTensor created with init_rrefs=False, no RRefs to remote shards available'NEWLINE )NEWLINE return self._remote_shardsNEWLINENEWLINE def __hash__(self):NEWLINE return id(self)NEWLINENEWLINE def __repr__(self):NEWLINE return f'ShardedTensor({self._metadata})'NEWLINENEWLINE @dataclassNEWLINE class ProcessGroupState:NEWLINE """NEWLINE State for ser-de of process groupNEWLINE """NEWLINE local_rank: intNEWLINE global_rank: intNEWLINE local_world_size: intNEWLINE global_world_size: intNEWLINENEWLINE def __getstate__(self):NEWLINE pg_state = ShardedTensor.ProcessGroupState(NEWLINE distributed_c10d.get_rank(self._process_group),NEWLINE distributed_c10d.get_rank(),NEWLINE distributed_c10d.get_world_size(self._process_group),NEWLINE distributed_c10d.get_world_size(),NEWLINE )NEWLINENEWLINE return self._local_shards, self._metadata, pg_state, self._sharding_spec, self._init_rrefsNEWLINENEWLINE def __setstate__(self, state):NEWLINE self._sharded_tensor_id = NoneNEWLINE if not distributed_c10d.is_initialized():NEWLINE raise RuntimeError(NEWLINE 'Need to initialize default process group using 'NEWLINE '"init_process_group" before loading ShardedTensor')NEWLINENEWLINE self._local_shards, self._metadata, pg_state, self._sharding_spec, self._init_rrefs = stateNEWLINENEWLINE # Setup process groupNEWLINE self._process_group = get_current_process_group()NEWLINENEWLINE # Validate process group.NEWLINE local_rank = distributed_c10d.get_rank(self._process_group)NEWLINE if pg_state.local_rank != local_rank:NEWLINE raise RuntimeError(NEWLINE f'Local rank at save time was {pg_state.local_rank}, but at 'NEWLINE f'load time was {local_rank}')NEWLINENEWLINE global_rank = distributed_c10d.get_rank()NEWLINE if pg_state.global_rank != global_rank:NEWLINE raise RuntimeError(NEWLINE f'Global rank at save time was {pg_state.global_rank}, but at 'NEWLINE f'load time was {global_rank}')NEWLINENEWLINE local_world_size = distributed_c10d.get_world_size(self._process_group)NEWLINE if pg_state.local_world_size != local_world_size:NEWLINE raise RuntimeError(NEWLINE f'Local world size at save time was {pg_state.local_world_size}, 'NEWLINE f'but at load time was {local_world_size}')NEWLINENEWLINE global_world_size = distributed_c10d.get_world_size()NEWLINE if pg_state.global_world_size != global_world_size:NEWLINE raise RuntimeError(NEWLINE f'Global world size at save time was {pg_state.global_world_size}, 'NEWLINE f'but at load time was {global_world_size}')NEWLINENEWLINE self._post_init()NEWLINENEWLINENEWLINEdef _create_tensor_from_params(*size, local_device, tensor_init_params: TensorInitParams):NEWLINE """ Helper to construct tensor from size, device and common params. 
"""NEWLINENEWLINE create_op = tensor_init_params.create_opNEWLINE dtype = tensor_init_params.tensor_properties.dtypeNEWLINE layout = tensor_init_params.tensor_properties.layoutNEWLINE requires_grad = tensor_init_params.tensor_properties.requires_gradNEWLINE memory_format = tensor_init_params.tensor_properties.memory_formatNEWLINE pin_memory = tensor_init_params.tensor_properties.pin_memoryNEWLINENEWLINE if create_op == CreateOp.ONES:NEWLINE return torch.ones(*size, dtype=dtype, layout=layout,NEWLINE device=local_device, pin_memory=pin_memory,NEWLINE requires_grad=requires_grad,)NEWLINE elif create_op == CreateOp.EMPTY:NEWLINE return torch.empty(*size, dtype=dtype, layout=layout,NEWLINE device=local_device, requires_grad=requires_grad,NEWLINE # NB: memory_format param is not accepted by torch.onesNEWLINE memory_format=memory_format, pin_memory=pin_memory,)NEWLINE elif tensor_init_params.create_op == CreateOp.ZEROS:NEWLINE return torch.zeros(*size,NEWLINE dtype=dtype,NEWLINE layout=layout,NEWLINE device=local_device,NEWLINE pin_memory=pin_memory,NEWLINE requires_grad=requires_grad,)NEWLINE elif tensor_init_params.create_op == CreateOp.RAND:NEWLINE return torch.rand(*size,NEWLINE dtype=dtype,NEWLINE layout=layout,NEWLINE device=local_device,NEWLINE pin_memory=pin_memory,NEWLINE requires_grad=requires_grad,)NEWLINE elif tensor_init_params.create_op == CreateOp.FULL:NEWLINE return torch.full(size=size,NEWLINE fill_value=tensor_init_params.fill_value,NEWLINE layout=layout,NEWLINE dtype=dtype,NEWLINE requires_grad=requires_grad,NEWLINE device=local_device, )NEWLINE else:NEWLINE raise ValueError(f'Unsupported create_op: {tensor_init_params.create_op}')NEWLINENEWLINENEWLINEclass _PartialTensor(object):NEWLINE """NEWLINE PartialTensor is an abstraction to represent Tensors that needNEWLINE aggregation across multiple devices and multiple processes.NEWLINENEWLINE PartialTensor is initialized in an SPMD like fashion where each rankNEWLINE initializes the PartialTensor. 
The PartialTensor object on each rankNEWLINE then only stores the local partial shard, the process group and theNEWLINE reduce op used to aggregate the shards into a full tensor.NEWLINENEWLINE PartialTensor doesn't provide any Tensor-like operations but is aNEWLINE wrapper providing the Tensor representing the local partial shard.NEWLINENEWLINE We assume the size of each local tensor to be exactly the same.NEWLINENEWLINE Users can apply custom distributed sharded computations on top ofNEWLINE this primitive.NEWLINENEWLINE Args:NEWLINE local_partial_shard (Tensor): Partial result stored across ranks.NEWLINE process_group (ProcessGroup): The process group to aggregate on.NEWLINE reduce_op (distributed_c10d.ReduceOp): Way to aggregate the partial result.NEWLINE Default: ``distributed_c10d.ReduceOp.SUM``NEWLINENEWLINE Examples:NEWLINE >>> # All tensors below are of torch.int64 type.NEWLINE >>> # We have 2 process groups, 2 ranks.NEWLINE >>> tensor = torch.arange(2, dtype=torch.int64) + 1 + 2 * rankNEWLINE >>> tensor = torch.cat([tensor, tensor + 2])NEWLINE >>> tensorNEWLINE tensor([1, 2, 3, 4]) # Rank 0NEWLINE tensor([3, 4, 5, 6]) # Rank 1NEWLINE >>> partial_tensor = _PartialTensor(tensor, distributed_c10d.ReduceOp.MAX)NEWLINE >>> sharding_dim = 0NEWLINE >>> collect_spec = ChunkShardingSpec(NEWLINE dim=sharding_dim,NEWLINE placements=[NEWLINE "rank:0/cuda:0",NEWLINE "rank:1/cuda:1",NEWLINE ],NEWLINE )NEWLINE >>> complete_tensor = partial_tensor.reshard(collect_spec)NEWLINE >>> complete_tensorNEWLINE ShardedTensor(NEWLINE ShardedTensorMetadata(NEWLINE shards_metadata=[NEWLINE ShardMetadata(shard_offsets=[0], shard_sizes=[2], placement=rank:0/cuda:0),NEWLINE ShardMetadata(shard_offsets=[2], shard_sizes=[2], placement=rank:1/cuda:1)],NEWLINE size=torch.Size([4])NEWLINE )NEWLINE >>> complete_tensor.local_tensor()NEWLINE tensor([3, 4]) # Rank 0NEWLINE tensor([5, 6]) # Rank 1NEWLINENEWLINE >>> # All tensors below are of torch.int64 type.NEWLINE >>> # We have 2 process groups, 2 ranks.NEWLINE >>> tensor = torch.tensor([1, 2]) + 2 * rankNEWLINE >>> tensor = torch.cat([tensor, tensor + 2])NEWLINE >>> tensorNEWLINE tensor([1, 2, 3, 4]) # Rank 0NEWLINE tensor([3, 4, 5, 6]) # Rank 1NEWLINE >>> partial_tensor = _PartialTensor(tensor)NEWLINE >>> complete_tensor = partial_tensor.reshard(collect_spec)NEWLINE >>> complete_tensorNEWLINE ShardedTensor(NEWLINE ShardedTensorMetadata(NEWLINE shards_metadata=[NEWLINE ShardMetadata(shard_offsets=[0], shard_sizes=[2], placement=rank:0/cuda:0),NEWLINE ShardMetadata(shard_offsets=[2], shard_sizes=[2], placement=rank:1/cuda:1)],NEWLINE size=torch.Size([4])NEWLINE )NEWLINE >>> complete_tensor.local_tensor()NEWLINE tensor([4, 6]) # Rank 0NEWLINE tensor([8, 10]) # Rank 1NEWLINE """NEWLINENEWLINE def __init__(NEWLINE self, local_shard, process_group=None, reduce_op=distributed_c10d.ReduceOp.SUMNEWLINE ):NEWLINE self.local_shard = local_shardNEWLINE self.process_group = (NEWLINE process_groupNEWLINE if process_groupNEWLINE else dist.distributed_c10d._get_default_group()NEWLINE )NEWLINE self.reduce_op = reduce_opNEWLINE # __post_init__ is not invoked automatically on a plain class, so callNEWLINE # it explicitly to validate the inputs.NEWLINE self.__post_init__()NEWLINENEWLINE def __post_init__(self):NEWLINE if not isinstance(self.local_shard, torch.Tensor):NEWLINE raise ValueError("local_shard needs to be a Tensor.")NEWLINE if not isinstance(self.reduce_op, distributed_c10d.ReduceOp):NEWLINE raise ValueError(NEWLINE "reduce_op needs to be a member of distributed_c10d.ReduceOp."NEWLINE )NEWLINENEWLINE def reshard(self, resharding_spec: ShardingSpec) -> ShardedTensor:NEWLINE """NEWLINE The reshard happens in two steps logically:NEWLINENEWLINE 1. Aggregate all the shards of the partial tensor.NEWLINE 2. Shard this tensor according to the provided spec.NEWLINENEWLINE In reality, for the sake of performance, we consolidate all partial tensorsNEWLINE across multiple ranks and convert to a sharded tensor in one step.NEWLINENEWLINE Args:NEWLINE resharding_spec (:class:`torch.distributed._shard.sharding_spec.ShardingSpec`):NEWLINE The specification describing how we reshard the aggregated local result.NEWLINENEWLINE Returns:NEWLINE A :class:`ShardedTensor` filled with the local aggregated result.NEWLINE """NEWLINE if not isinstance(resharding_spec, ChunkShardingSpec):NEWLINE raise NotImplementedError("Only ChunkShardingSpec supported for reshard.")NEWLINE sharding_dim = int(resharding_spec.dim) # type: ignore[attr-defined]NEWLINE if self.local_shard.size(sharding_dim) % self.process_group.size() != 0:NEWLINE raise ValueError('World size needs to evenly divide the length of the resharding dimension.')NEWLINE if self.local_shard.is_complex():NEWLINE raise NotImplementedError("Only real partial tensor supported for reshard.")NEWLINENEWLINE local_shards = self.local_shard.chunk(self.process_group.size(), dim=sharding_dim)NEWLINE local_result = reduce_scatter(NEWLINE torch.empty_like(local_shards[0]), list(local_shards), op=self.reduce_opNEWLINE )NEWLINENEWLINE sharded_tensor_size = self.local_shard.size()NEWLINE current_offsets = [0] * len(local_result.size())NEWLINE shards = []NEWLINE rank = self.process_group.rank()NEWLINE for idx, placement in enumerate(resharding_spec.placements): # type: ignore[attr-defined]NEWLINE if rank == placement.rank(): # type: ignore[union-attr]NEWLINE local_metadata = ShardMetadata(NEWLINE shard_offsets=current_offsets,NEWLINE shard_sizes=list(local_result.size()),NEWLINE placement=placement,NEWLINE )NEWLINE shards.append(Shard(local_result, local_metadata))NEWLINE breakNEWLINE current_offsets[sharding_dim] += local_result.size(sharding_dim) # type: ignore[index]NEWLINENEWLINE st = ShardedTensor._init_from_local_shards(NEWLINE shards, tuple(sharded_tensor_size), process_group=self.process_groupNEWLINE )NEWLINE st._sharding_spec = copy.deepcopy(resharding_spec)NEWLINENEWLINE return stNEWLINE |
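A minimal sketch of how the reshard flow above is driven from user code, assuming a 2-rank GPU process group has already been set up via torch.distributed.init_process_group; the placement strings follow the "rank:<r>/cuda:<d>" convention used in the docstrings, and collect_partial is an illustrative helper, not part of the module.

from torch.distributed._shard.sharding_spec import ChunkShardingSpec

def collect_partial(partial_tensor):
    # Each rank holds a same-sized partial result; reshard() reduce-scatters
    # with the tensor's reduce_op and returns a ShardedTensor whose local
    # shard is this rank's chunk of the aggregated result.
    spec = ChunkShardingSpec(
        dim=0,
        placements=["rank:0/cuda:0", "rank:1/cuda:1"],  # one placement per rank
    )
    return partial_tensor.reshard(spec)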
from __future__ import print_function, division, absolute_importNEWLINENEWLINEimport osNEWLINEimport timeNEWLINEimport subprocessNEWLINEfrom contextlib import contextmanagerNEWLINENEWLINEimport pytestNEWLINENEWLINEimport skeinNEWLINENEWLINENEWLINE@contextmanagerNEWLINEdef set_skein_config(tmpdir):NEWLINE tmpdir = str(tmpdir)NEWLINE old = skein.properties.config_dirNEWLINE try:NEWLINE skein.properties._mapping['config_dir'] = tmpdirNEWLINE yield tmpdirNEWLINE finally:NEWLINE skein.properties._mapping['config_dir'] = oldNEWLINENEWLINENEWLINE@pytest.fixtureNEWLINEdef skein_config(tmpdir_factory):NEWLINE with set_skein_config(tmpdir_factory.mktemp('config')) as config:NEWLINE yield configNEWLINENEWLINENEWLINE@pytest.fixture(scope="session")NEWLINEdef security(tmpdir_factory):NEWLINE return skein.Security.from_new_directory(str(tmpdir_factory.mktemp('security')))NEWLINENEWLINENEWLINE@pytest.fixture(scope="session")NEWLINEdef has_kerberos_enabled():NEWLINE return HAS_KERBEROSNEWLINENEWLINENEWLINEKEYTAB_PATH = "/home/testuser/testuser.keytab"NEWLINEHAS_KERBEROS = os.path.exists(KEYTAB_PATH)NEWLINENEWLINENEWLINEdef do_kinit():NEWLINE subprocess.check_call(["kinit", "-kt", KEYTAB_PATH, "testuser"])NEWLINENEWLINENEWLINE@pytest.fixture(scope="session")NEWLINEdef kinit():NEWLINE if HAS_KERBEROS:NEWLINE do_kinit()NEWLINENEWLINENEWLINE@pytest.fixtureNEWLINEdef not_logged_in():NEWLINE if not HAS_KERBEROS:NEWLINE pytest.skip("Without kerberos, users are always logged in")NEWLINE try:NEWLINE subprocess.check_call(["kdestroy"])NEWLINE yieldNEWLINE finally:NEWLINE do_kinit()NEWLINENEWLINENEWLINE@pytest.fixture(scope="session")NEWLINEdef client(security, kinit):NEWLINE with skein.Client(security=security) as client:NEWLINE yield clientNEWLINENEWLINENEWLINEsleeper = skein.Service(resources=skein.Resources(memory=128, vcores=1),NEWLINE commands=['sleep infinity'])NEWLINENEWLINENEWLINEsleep_until_killed = skein.ApplicationSpec(name="sleep_until_killed",NEWLINE queue="default",NEWLINE tags={'sleeps'},NEWLINE services={'sleeper': sleeper})NEWLINENEWLINENEWLINEdef check_is_shutdown(client, app_id, status=None):NEWLINE timeleft = 5NEWLINE while timeleft:NEWLINE if client.application_report(app_id).state != 'RUNNING':NEWLINE breakNEWLINE time.sleep(0.1)NEWLINE timeleft -= 0.1NEWLINE else:NEWLINE assert False, "Application wasn't properly terminated"NEWLINENEWLINE if status is not None:NEWLINE assert client.application_report(app_id).final_status == statusNEWLINENEWLINENEWLINEdef wait_for_completion(client, app_id, timeout=30):NEWLINE while timeout:NEWLINE final_status = client.application_report(app_id).final_statusNEWLINE if final_status != 'UNDEFINED':NEWLINE return final_statusNEWLINE time.sleep(0.1)NEWLINE timeout -= 0.1NEWLINE else:NEWLINE assert False, "Application timed out"NEWLINENEWLINENEWLINE@contextmanagerNEWLINEdef ensure_shutdown(client, app_id, status=None):NEWLINE try:NEWLINE yieldNEWLINE except Exception:NEWLINE client.kill_application(app_id)NEWLINE raiseNEWLINE finally:NEWLINE try:NEWLINE check_is_shutdown(client, app_id, status=status)NEWLINE except AssertionError:NEWLINE client.kill_application(app_id)NEWLINE raiseNEWLINENEWLINENEWLINE@contextmanagerNEWLINEdef run_application(client, spec=sleep_until_killed):NEWLINE app = client.submit_and_connect(spec)NEWLINE with ensure_shutdown(client, app.id):NEWLINE yield appNEWLINENEWLINENEWLINEdef wait_for_containers(app, n, **kwargs):NEWLINE timeleft = 5NEWLINE while timeleft:NEWLINE containers = app.get_containers(**kwargs)NEWLINE if len(containers) == n:NEWLINE breakNEWLINE time.sleep(0.1)NEWLINE timeleft -= 0.1NEWLINE else:NEWLINE assert False, "timeout"NEWLINENEWLINE return containersNEWLINENEWLINENEWLINEdef get_logs(app_id, tries=3):NEWLINE command = ["yarn", "logs", "-applicationId", app_id]NEWLINE # retry transient failures silently; the final attempt is allowed to raiseNEWLINE for _ in range(tries - 1):NEWLINE try:NEWLINE return subprocess.check_output(command).decode()NEWLINE except Exception:NEWLINE passNEWLINE return subprocess.check_output(command).decode()NEWLINE |
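For orientation, a sketch of a test built from the helpers above; it assumes the client fixture is collected by pytest from this module, and that skein's application client exposes shutdown() and get_containers(services=..., states=...) as used here, so treat the exact keyword arguments as assumptions.

def test_sleeper_runs_and_shuts_down(client):
    with run_application(client) as app:
        # the 'sleeper' service from sleep_until_killed should reach RUNNING
        containers = wait_for_containers(app, 1, services=['sleeper'], states=['RUNNING'])
        assert containers[0].service_name == 'sleeper'
        app.shutdown()
    check_is_shutdown(client, app.id, 'SUCCEEDED')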
# SETTINGNEWLINEimport osNEWLINENEWLINEencoding_ = 'utf_8_sig'NEWLINEtime_zone = 'Asia/Shanghai'NEWLINEpool_max_workers = 8NEWLINEdefault_options_ = {NEWLINE 'encoding': encoding_,NEWLINE}NEWLINEbase_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")NEWLINEstatic_dir = os.path.join(base_dir, 'static').replace("\\", "/")NEWLINEdefault_wkhtmltopdf_path = f'{base_dir}/bin/wkhtmltopdf.exe'NEWLINEdefault_wkhtmltoimage_path = f'{base_dir}/bin/wkhtmltoimage.exe'NEWLINENEWLINE# default_wkhtmltopdf_path = r'D:/wkhtmltopdf/bin/wkhtmltopdf.exe'NEWLINE# default_wkhtmltoimage_path = r'D:/wkhtmltopdf/bin/wkhtmltoimage.exe'NEWLINENEWLINEecho_info = '{}{} → {} exported successfully' |
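To make the role of these settings concrete, a hypothetical consumer; export_pdf and its parameters are illustrative only, and the echo_info template expects three placeholders (a tag, the source name and the destination).

import subprocess

def export_pdf(src_html, out_pdf, binary=default_wkhtmltopdf_path):
    # wkhtmltopdf <input> <output>; the binary path defaults to the bundled exe
    subprocess.check_call([binary, src_html, out_pdf])
    print(echo_info.format('[pdf] ', src_html, out_pdf))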
from __future__ import absolute_import, division #makes KratosMultiphysics backward compatible with python 2.6 and 2.7NEWLINENEWLINE# Importing the Kratos LibraryNEWLINEimport KratosMultiphysicsNEWLINENEWLINE# Import applicationsNEWLINEimport KratosMultiphysics.ConvectionDiffusionApplication as KratosConvDiffNEWLINEimport KratosMultiphysics.MultilevelMonteCarloApplication as KratosMLMCNEWLINENEWLINE# Avoid printing of Kratos informationNEWLINEKratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)NEWLINENEWLINE# Importing the base classNEWLINEfrom analysis_stage import AnalysisStageNEWLINENEWLINE# Import packagesNEWLINEimport numpy as npNEWLINENEWLINE# Import Monte Carlo libraryNEWLINEimport mc_utilities as mcNEWLINENEWLINE# Import cPickle to pickle the serializerNEWLINEtry:NEWLINE import cPickle as pickle # Use cPickle on Python 2.7NEWLINEexcept ImportError:NEWLINE import pickleNEWLINENEWLINE# Import exaquteNEWLINEfrom exaqute.ExaquteTaskPyCOMPSs import * # to execute with pycompssNEWLINE# from exaqute.ExaquteTaskHyperLoom import * # to execute with the IT4 schedulerNEWLINE# from exaqute.ExaquteTaskLocal import * # to execute with python3NEWLINE'''NEWLINEget_value_from_remote is the equivalent of compss_wait_on: a synchronization pointNEWLINEin the future, once everything is integrated with the it4i team, you can import exaqute.ExaquteTaskHyperLoom to launch your code with their scheduler instead of BSC'sNEWLINE'''NEWLINENEWLINENEWLINE'''Adapt the following class depending on the problem, deriving the MonteCarloAnalysis class from the problem of interest'''NEWLINENEWLINE'''NEWLINEThis Analysis Stage implementation solves the elliptic PDE in (0,1)^2 with zero Dirichlet boundary conditionsNEWLINE-lapl(u) = xi*f, f= -432*x*(x-1)*y*(y-1)NEWLINE f= -432*(x**2+y**2-x-y)NEWLINEwhere xi is a Beta(2,6) random variable, and computes statistics of the QoINEWLINEQ = int_(0,1)^2 u(x,y)dxdyNEWLINE'''NEWLINEclass MonteCarloAnalysis(AnalysisStage):NEWLINE '''Main analysis stage for Monte Carlo simulations'''NEWLINE def __init__(self,input_model,input_parameters,sample):NEWLINE self.sample = sampleNEWLINE super(MonteCarloAnalysis,self).__init__(input_model,input_parameters)NEWLINE self._GetSolver().main_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NODAL_AREA)NEWLINENEWLINE def _CreateSolver(self):NEWLINE import convection_diffusion_stationary_solverNEWLINE return convection_diffusion_stationary_solver.CreateSolver(self.model,self.project_parameters["solver_settings"])NEWLINENEWLINE def _GetSimulationName(self):NEWLINE return "Monte Carlo Analysis"NEWLINENEWLINE '''Introduce here the stochasticity in the right hand side defining the forcing function and apply the stochastic contribution'''NEWLINE def ModifyInitialProperties(self):NEWLINE for node in self.model.GetModelPart("MLMCLaplacianModelPart").Nodes:NEWLINE coord_x = node.XNEWLINE coord_y = node.YNEWLINE # forcing = -432.0 * coord_x * (coord_x - 1) * coord_y * (coord_y - 1)NEWLINE forcing = -432.0 * (coord_x**2 + coord_y**2 - coord_x - coord_y) # this forcing admits the analytical solution used in the commented error check at the bottom of the fileNEWLINE node.SetSolutionStepValue(KratosMultiphysics.HEAT_FLUX,forcing*self.sample)NEWLINENEWLINENEWLINE##################################################NEWLINE######## END OF CLASS MONTECARLOANALYSIS #########NEWLINE##################################################NEWLINENEWLINENEWLINE'''NEWLINEfunction generating the random sampleNEWLINEhere the sample has a 
beta distribution with parameters alpha = 2.0 and beta = 6.0NEWLINE'''NEWLINEdef GenerateSample():NEWLINE alpha = 2.0NEWLINE beta = 6.0NEWLINE number_samples = 1NEWLINE sample = np.random.beta(alpha,beta,number_samples)NEWLINE return sampleNEWLINENEWLINENEWLINE'''NEWLINEfunction evaluating the QoI of the problem: int_{domain} TEMPERATURE(x,y) dx dyNEWLINEright now we are using the midpoint rule to evaluate the integral: improve!NEWLINE'''NEWLINEdef EvaluateQuantityOfInterest(simulation):NEWLINE """here we evaluate the QoI of the problem: int_{domain} SOLUTION(x,y) dx dyNEWLINE we use the midpoint rule to evaluate the integral"""NEWLINE KratosMultiphysics.CalculateNodalAreaProcess(simulation._GetSolver().main_model_part,2).Execute()NEWLINE Q = 0.0NEWLINE for node in simulation._GetSolver().main_model_part.Nodes:NEWLINE Q = Q + (node.GetSolutionStepValue(KratosMultiphysics.NODAL_AREA)*node.GetSolutionStepValue(KratosMultiphysics.TEMPERATURE))NEWLINE return QNEWLINENEWLINENEWLINE'''NEWLINEfunction called in the main returning a future object (the result class) and an integer (the finer level)NEWLINEinput:NEWLINE pickled_coarse_model : pickled modelNEWLINE pickled_coarse_parameters : pickled parametersNEWLINEoutput:NEWLINE MonteCarloResults class : class of the simulation resultsNEWLINE current_MC_level : level of the current MLMC simulationNEWLINE'''NEWLINEdef ExecuteMonteCarloAnalysis(pickled_model, pickled_parameters):NEWLINE current_MC_level = 0 # MC has only level 0NEWLINE return (ExecuteMonteCarloAnalysis_Task(pickled_model, pickled_parameters),current_MC_level)NEWLINENEWLINENEWLINE'''NEWLINEfunction executing the problemNEWLINEinput:NEWLINE model : serialization of the modelNEWLINE parameters : serialization of the Project ParametersNEWLINEoutput:NEWLINE QoI : Quantity of InterestNEWLINE'''NEWLINE@ExaquteTask(returns=1)NEWLINEdef ExecuteMonteCarloAnalysis_Task(pickled_model, pickled_parameters):NEWLINE '''overwrite the old model serializer with the unpickled one'''NEWLINE model_serializer = pickle.loads(pickled_model)NEWLINE current_model = KratosMultiphysics.Model()NEWLINE model_serializer.Load("ModelSerialization",current_model)NEWLINE del(model_serializer)NEWLINE '''overwrite the old parameters serializer with the unpickled one'''NEWLINE serialized_parameters = pickle.loads(pickled_parameters)NEWLINE current_parameters = KratosMultiphysics.Parameters()NEWLINE serialized_parameters.Load("ParametersSerialization",current_parameters)NEWLINE del(serialized_parameters)NEWLINE '''initialize the MonteCarloResults class'''NEWLINE current_level = 0 # always 0 for MCNEWLINE mc_results_class = mc.MonteCarloResults(current_level)NEWLINE sample = GenerateSample()NEWLINE simulation = MonteCarloAnalysis(current_model,current_parameters,sample)NEWLINE simulation.Run()NEWLINE QoI = EvaluateQuantityOfInterest(simulation)NEWLINE mc_results_class.QoI[current_level].append(QoI) # saving results in the corresponding list, for MC only list of level 0NEWLINE return mc_results_classNEWLINENEWLINENEWLINE'''NEWLINEfunction serializing and pickling the model and the parameters of the problemNEWLINEthe idea is the following:NEWLINEi) from Model/Parameters Kratos object to StreamSerializer Kratos objectNEWLINEii) from StreamSerializer Kratos object to pickle stringNEWLINEiii) from pickle string to StreamSerializer Kratos objectNEWLINEiv) from StreamSerializer Kratos object to Model/Parameters Kratos objectNEWLINEinput:NEWLINE parameter_file_name : path of the Project Parameters fileNEWLINEoutput:NEWLINE 
pickled_model : model serializationNEWLINE pickled_parameters : project parameters serializationNEWLINE'''NEWLINE@ExaquteTask(parameter_file_name=FILE_IN,returns=2)NEWLINEdef SerializeModelParameters_Task(parameter_file_name):NEWLINE with open(parameter_file_name,'r') as parameter_file:NEWLINE parameters = KratosMultiphysics.Parameters(parameter_file.read())NEWLINE local_parameters = parametersNEWLINE model = KratosMultiphysics.Model()NEWLINE # local_parameters["solver_settings"]["model_import_settings"]["input_filename"].SetString(model_part_file_name[:-5])NEWLINE fake_sample = GenerateSample()NEWLINE simulation = MonteCarloAnalysis(model,local_parameters,fake_sample)NEWLINE simulation.Initialize()NEWLINE serialized_model = KratosMultiphysics.StreamSerializer()NEWLINE serialized_model.Save("ModelSerialization",simulation.model)NEWLINE serialized_parameters = KratosMultiphysics.StreamSerializer()NEWLINE serialized_parameters.Save("ParametersSerialization",simulation.project_parameters)NEWLINE # pickle the serialized dataNEWLINE pickled_model = pickle.dumps(serialized_model, 2) # second argument is the protocol and is NECESSARY (according to pybind11 docs)NEWLINE pickled_parameters = pickle.dumps(serialized_parameters, 2)NEWLINE print("\n","#"*50," SERIALIZATION COMPLETED ","#"*50,"\n")NEWLINE return pickled_model,pickled_parametersNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINENEWLINE '''set the ProjectParameters.json path'''NEWLINE parameter_file_name = "../tests/PoissonSquareTest/parameters_poisson_finer.json"NEWLINE '''create a serialization of the model and of the project parameters'''NEWLINE pickled_model,pickled_parameters = SerializeModelParameters_Task(parameter_file_name)NEWLINE '''customize setting parameters of the MC simulation'''NEWLINE settings_MC_simulation = KratosMultiphysics.Parameters("""NEWLINE {NEWLINE "tolerance" : 0.1,NEWLINE "cphi" : 5e-1,NEWLINE "batch_size" : 20,NEWLINE "convergence_criteria" : "MC_higher_moments_sequential_stopping_rule"NEWLINE }NEWLINE """)NEWLINE '''construct MonteCarlo class'''NEWLINE mc_class = mc.MonteCarlo(settings_MC_simulation)NEWLINE '''start MC algorithm'''NEWLINE while mc_class.convergence is not True:NEWLINE mc_class.InitializeMCPhase()NEWLINE mc_class.ScreeningInfoInitializeMCPhase()NEWLINE for instance in range(mc_class.difference_number_samples[0]):NEWLINE mc_class.AddResults(ExecuteMonteCarloAnalysis(pickled_model,pickled_parameters))NEWLINE mc_class.FinalizeMCPhase()NEWLINE mc_class.ScreeningInfoFinalizeMCPhase()NEWLINENEWLINE mc_class.QoI.mean = get_value_from_remote(mc_class.QoI.mean)NEWLINE print("\nMC mean = ",mc_class.QoI.mean)NEWLINENEWLINENEWLINE ''' The below part evaluates the relative L2 error between the numerical solution SOLUTION(x,y,sample) and the analytical solution, also dependent on sample.NEWLINE Analytical solution available in case FORCING = sample * -432.0 * (coord_x**2 + coord_y**2 - coord_x - coord_y)'''NEWLINE # model_serializer = pickle.loads(pickled_model)NEWLINE # current_model = KratosMultiphysics.Model()NEWLINE # model_serializer.Load("ModelSerialization",current_model)NEWLINE # del(model_serializer)NEWLINE # serialized_parameters = pickle.loads(pickled_parameters)NEWLINE # current_parameters = KratosMultiphysics.Parameters()NEWLINE # serialized_parameters.Load("ParametersSerialization",current_parameters)NEWLINE # del(serialized_parameters)NEWLINE # sample = 1.0NEWLINE # simulation = MonteCarloAnalysis(current_model,current_parameters,sample)NEWLINE # simulation.Run()NEWLINE # 
KratosMultiphysics.CalculateNodalAreaProcess(simulation._GetSolver().main_model_part,2).Execute()NEWLINE # error = 0.0NEWLINE # L2norm_analyticalsolution = 0.0NEWLINE # for node in simulation._GetSolver().main_model_part.Nodes:NEWLINE # local_error = ((node.GetSolutionStepValue(KratosMultiphysics.TEMPERATURE) - (432.0*simulation.sample*node.X*node.Y*(1-node.X)*(1-node.Y)*0.5))**2) * node.GetSolutionStepValue(KratosMultiphysics.NODAL_AREA)NEWLINE # error = error + local_errorNEWLINE # local_analyticalsolution = (432.0*simulation.sample*node.X*node.Y*(1-node.X)*(1-node.Y)*0.5)**2 * node.GetSolutionStepValue(KratosMultiphysics.NODAL_AREA)NEWLINE # L2norm_analyticalsolution = L2norm_analyticalsolution + local_analyticalsolutionNEWLINE # error = np.sqrt(error)NEWLINE # L2norm_analyticalsolution = np.sqrt(L2norm_analyticalsolution)NEWLINE # print("L2 relative error = ", error/L2norm_analyticalsolution) |
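The serializer round trip the comments above describe (Model/Parameters → StreamSerializer → pickle string → StreamSerializer → Model/Parameters), condensed into a sketch that reuses only names already used in this file:

def roundtrip_model(pickled_model):
    # iii) pickle string -> StreamSerializer Kratos object
    model_serializer = pickle.loads(pickled_model)
    # iv) StreamSerializer Kratos object -> Model Kratos object
    current_model = KratosMultiphysics.Model()
    model_serializer.Load("ModelSerialization", current_model)
    return current_model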
from pylinsql.async_database import ConnectionParametersNEWLINEimport unittestNEWLINENEWLINENEWLINEclass DatabaseTestCase(unittest.IsolatedAsyncioTestCase):NEWLINE params: ConnectionParametersNEWLINENEWLINE def __init__(self, method_name: str):NEWLINE super().__init__(method_name)NEWLINE self.params = ConnectionParameters()NEWLINENEWLINE def assertEmpty(self, obj):NEWLINE self.assertFalse(obj)NEWLINENEWLINE def assertNotEmpty(self, obj):NEWLINE self.assertTrue(obj)NEWLINE |
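A sketch of how this base class is intended to be subclassed; the test bodies are illustrative and use only the helpers defined above (IsolatedAsyncioTestCase runs the async test in its own event loop).

class ExampleDatabaseTest(DatabaseTestCase):
    async def test_connection_parameters_exist(self):
        # self.params is built in __init__ from ConnectionParameters defaults
        self.assertNotEmpty(self.params)

    def test_empty_helpers(self):
        self.assertEmpty([])
        self.assertNotEmpty([1])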
from __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import unicode_literalsNEWLINEfrom collections import defaultdictNEWLINEfrom unittest import skipUnless, SkipTestNEWLINEfrom uuid import uuid4, UUIDNEWLINENEWLINEfrom django.conf import settingsNEWLINEfrom django.test import TestCaseNEWLINEfrom django.test.utils import override_settingsNEWLINENEWLINEfrom corehq.form_processor.backends.sql.dbaccessors import ShardAccessorNEWLINEfrom corehq.form_processor.models import XFormInstanceSQL, CommCareCaseSQLNEWLINEfrom corehq.form_processor.tests.utils import create_form_for_test, FormProcessorTestUtils, use_sql_backendNEWLINEfrom corehq.sql_db.config import partition_configNEWLINEimport sixNEWLINEfrom six.moves import rangeNEWLINENEWLINEDOMAIN = 'sharding-test'NEWLINENEWLINENEWLINE@use_sql_backendNEWLINE@skipUnless(settings.USE_PARTITIONED_DATABASE, 'Only applicable if sharding is setup')NEWLINEclass ShardingTests(TestCase):NEWLINENEWLINE @classmethodNEWLINE def setUpClass(cls):NEWLINE if not settings.USE_PARTITIONED_DATABASE:NEWLINE # https://github.com/nose-devs/nose/issues/946NEWLINE raise SkipTest('Only applicable if sharding is setup')NEWLINE super(ShardingTests, cls).setUpClass()NEWLINE assert len(partition_config.get_form_processing_dbs()) > 1NEWLINENEWLINE def tearDown(self):NEWLINE FormProcessorTestUtils.delete_all_sql_forms(DOMAIN)NEWLINE FormProcessorTestUtils.delete_all_sql_cases(DOMAIN)NEWLINE super(ShardingTests, self).tearDown()NEWLINENEWLINE def test_objects_only_in_one_db(self):NEWLINE case_id = uuid4().hexNEWLINE form = create_form_for_test(DOMAIN, case_id=case_id)NEWLINENEWLINE dbs_with_form = []NEWLINE dbs_with_case = []NEWLINE for db in partition_config.get_form_processing_dbs():NEWLINE form_in_db = XFormInstanceSQL.objects.using(db).filter(form_id=form.form_id).exists()NEWLINE if form_in_db:NEWLINE dbs_with_form.append(db)NEWLINENEWLINE case_in_db = CommCareCaseSQL.objects.using(db).filter(case_id=case_id).exists()NEWLINE if case_in_db:NEWLINE dbs_with_case.append(db)NEWLINENEWLINE self.assertEqual(1, len(dbs_with_form))NEWLINE self.assertEqual(1, len(dbs_with_case))NEWLINENEWLINE def test_objects_distributed_to_all_dbs(self):NEWLINE """NEWLINE Rudimentary test to ensure that not all cases / forms get saved to the same DB.NEWLINE """NEWLINE num_forms = 20NEWLINE for i in range(num_forms):NEWLINE create_form_for_test(DOMAIN, case_id=uuid4().hex)NEWLINENEWLINE forms_per_db = {}NEWLINE cases_per_db = {}NEWLINE for db in partition_config.get_form_processing_dbs():NEWLINE forms_per_db[db] = XFormInstanceSQL.objects.using(db).filter(domain=DOMAIN).count()NEWLINE cases_per_db[db] = CommCareCaseSQL.objects.using(db).filter(domain=DOMAIN).count()NEWLINENEWLINE self.assertEqual(num_forms, sum(forms_per_db.values()), forms_per_db)NEWLINE self.assertEqual(num_forms, sum(cases_per_db.values()), cases_per_db)NEWLINE self.assertTrue(NEWLINE all(num_forms_in_db < num_forms for num_forms_in_db in forms_per_db.values()),NEWLINE forms_per_dbNEWLINE )NEWLINE self.assertTrue(NEWLINE all(num_cases_in_db < num_forms for num_cases_in_db in cases_per_db.values()),NEWLINE cases_per_dbNEWLINE )NEWLINENEWLINE def test_python_hashing_gives_correct_db(self):NEWLINE # Rudimentary test to ensure that python sharding matches SQL shardingNEWLINE num_forms = 100NEWLINE form_ids = [create_form_for_test(DOMAIN).form_id for i in range(num_forms)]NEWLINENEWLINE dbs_for_docs = ShardAccessor.get_database_for_docs(form_ids)NEWLINE for form_id, db_alias in 
dbs_for_docs.items():NEWLINE XFormInstanceSQL.objects.using(db_alias).get(form_id=form_id)NEWLINENEWLINE def test_same_dbalias_util(self):NEWLINE from corehq.sql_db.util import get_db_alias_for_partitioned_doc, new_id_in_same_dbaliasNEWLINE for i in range(10):NEWLINE # test multiple times to cover a wider range of idsNEWLINE f1_id = six.text_type(uuid4())NEWLINE old_db_alias = get_db_alias_for_partitioned_doc(f1_id)NEWLINE f2_id = new_id_in_same_dbalias(f1_id)NEWLINE new_db_alias = get_db_alias_for_partitioned_doc(f2_id)NEWLINE self.assertEqual(new_db_alias, old_db_alias)NEWLINENEWLINENEWLINEDATABASES = {NEWLINE key: {NEWLINE 'ENGINE': 'django.db.backends.sqlite3',NEWLINE 'NAME': key,NEWLINE } for key in ['default', 'proxy', 'p1', 'p2', 'p3', 'p4', 'p5']NEWLINE}NEWLINENEWLINENEWLINEPARTITION_DATABASE_CONFIG = {NEWLINE 'shards': {NEWLINE 'p1': [0, 204],NEWLINE 'p2': [205, 409],NEWLINE 'p3': [410, 614],NEWLINE 'p4': [615, 819],NEWLINE 'p5': [820, 1023]NEWLINE },NEWLINE 'groups': {NEWLINE 'main': ['default'],NEWLINE 'proxy': ['proxy'],NEWLINE 'form_processing': ['p1', 'p2', 'p3', 'p4', 'p5'],NEWLINE }NEWLINE}NEWLINENEWLINENEWLINE@use_sql_backendNEWLINE@override_settings(PARTITION_DATABASE_CONFIG=PARTITION_DATABASE_CONFIG, DATABASES=DATABASES)NEWLINE@skipUnless(settings.USE_PARTITIONED_DATABASE, 'Only applicable if sharding is setup')NEWLINEclass ShardAccessorTests(TestCase):NEWLINENEWLINE @classmethodNEWLINE def setUpClass(cls):NEWLINE if not settings.USE_PARTITIONED_DATABASE:NEWLINE # https://github.com/nose-devs/nose/issues/946NEWLINE raise SkipTest('Only applicable if sharding is setup')NEWLINE super(ShardAccessorTests, cls).setUpClass()NEWLINE partition_config.get_django_shard_map.reset_cache(partition_config)NEWLINE partition_config.get_shards.reset_cache(partition_config)NEWLINE partition_config._get_django_shards.reset_cache(partition_config)NEWLINENEWLINE @classmethodNEWLINE def tearDownClass(cls):NEWLINE partition_config.get_django_shard_map.reset_cache(partition_config)NEWLINE partition_config.get_shards.reset_cache(partition_config)NEWLINE partition_config._get_django_shards.reset_cache(partition_config)NEWLINE super(ShardAccessorTests, cls).tearDownClass()NEWLINENEWLINE def test_hash_doc_ids(self):NEWLINE N = 1001NEWLINE doc_ids = [str(i) for i in range(N)]NEWLINE hashes = ShardAccessor.hash_doc_ids_sql_for_testing(doc_ids)NEWLINE self.assertEqual(len(hashes), N)NEWLINE self.assertTrue(all(isinstance(hash_, int) for hash_ in hashes.values()))NEWLINENEWLINE def test_get_database_for_docs(self):NEWLINE # test that sharding 1000 docs gives a distribution within some toleranceNEWLINE # (bit of a vague test)NEWLINE N = 1000NEWLINE doc_ids = [str(i) for i in range(N)]NEWLINE doc_db_map = ShardAccessor.get_database_for_docs(doc_ids)NEWLINE doc_count_per_db = defaultdict(int)NEWLINE for db_alias in doc_db_map.values():NEWLINE doc_count_per_db[db_alias] += 1NEWLINENEWLINE num_dbs = len(partition_config.get_form_processing_dbs())NEWLINE even_split = int(N // num_dbs)NEWLINE tolerance = N * 0.05 # 5% toleranceNEWLINE diffs = [abs(even_split - count) for count in doc_count_per_db.values()]NEWLINE outliers = [diff for diff in diffs if diff > tolerance]NEWLINE message = 'partitioning not within tolerance: tolerance={}, diffs={}'.format(tolerance, diffs)NEWLINE self.assertEqual(len(outliers), 0, message)NEWLINENEWLINE def test_hash_in_python(self):NEWLINE # test that python hashing matches with SQL hashingNEWLINE N = 2048NEWLINE doc_ids = [str(i) for i in range(N)]NEWLINENEWLINE sql_hashes = ShardAccessor.hash_doc_ids_sql_for_testing(doc_ids)NEWLINENEWLINE csiphash_hashes = ShardAccessor.hash_doc_ids_python(doc_ids)NEWLINE self.assertEqual(len(csiphash_hashes), N)NEWLINE self.assertTrue(all(isinstance(hash_, six.integer_types) for hash_ in csiphash_hashes.values()))NEWLINENEWLINE N_shards = 1024NEWLINE part_mask = N_shards - 1NEWLINENEWLINE sql_shards = {doc_id: hash_ & part_mask for doc_id, hash_ in sql_hashes.items()}NEWLINE python_shards = {doc_id: hash_ & part_mask for doc_id, hash_ in csiphash_hashes.items()}NEWLINENEWLINE self.assertEqual(python_shards, sql_shards)NEWLINENEWLINE def test_hash_uuid(self):NEWLINE uuid = UUID('403724ef9fe141f2908363918c62c2ff')NEWLINE self.assertEqual(ShardAccessor.hash_doc_id_python(uuid), 1415444857)NEWLINE self.assertEqual(ShardAccessor.hash_doc_uuid_sql_for_testing(uuid), 1415444857)NEWLINE |
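In isolation, the arithmetic these tests pin down: a document's shard is the low bits of its hash, and the shard id is then looked up in the inclusive ranges from PARTITION_DATABASE_CONFIG. The hash function input below is a stand-in for the SipHash value ShardAccessor computes.

N_SHARDS = 1024

def shard_id_for(doc_hash):
    # the low 10 bits of the hash select one of 1024 logical shards
    return doc_hash & (N_SHARDS - 1)

def db_for_shard(shard_id, shard_ranges):
    # shard_ranges e.g. {'p1': [0, 204], ...}; ranges are inclusive
    for db, (lo, hi) in shard_ranges.items():
        if lo <= shard_id <= hi:
            return db
    raise ValueError('shard %s is not mapped to a database' % shard_id)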
import osNEWLINEimport pytestNEWLINEimport subprocessNEWLINEimport sysNEWLINEimport timeNEWLINEimport fsspecNEWLINEfrom distutils.version import LooseVersionNEWLINENEWLINEfrom dask.bytes.core import open_filesNEWLINEfrom dask.bytes._compatibility import FSSPEC_042NEWLINEfrom dask.utils import tmpdirNEWLINENEWLINEfiles = ["a", "b"]NEWLINErequests = pytest.importorskip("requests")NEWLINEerrs = (requests.exceptions.RequestException,)NEWLINEif LooseVersion(fsspec.__version__) > "0.7.4":NEWLINE aiohttp = pytest.importorskip("aiohttp")NEWLINE errs = errs + (aiohttp.client_exceptions.ClientResponseError,)NEWLINENEWLINENEWLINE@pytest.fixture(scope="module")NEWLINEdef dir_server():NEWLINE with tmpdir() as d:NEWLINE for fn in files:NEWLINE with open(os.path.join(d, fn), "wb") as f:NEWLINE f.write(b"a" * 10000)NEWLINENEWLINE cmd = [sys.executable, "-m", "http.server", "8999"]NEWLINE p = subprocess.Popen(cmd, cwd=d)NEWLINE timeout = 10NEWLINE while True:NEWLINE try:NEWLINE requests.get("http://localhost:8999")NEWLINE breakNEWLINE except requests.exceptions.ConnectionError as e:NEWLINE time.sleep(0.1)NEWLINE timeout -= 0.1NEWLINE if timeout < 0:NEWLINE raise RuntimeError("Server did not appear") from eNEWLINE yield dNEWLINE p.terminate()NEWLINENEWLINENEWLINEdef test_simple(dir_server):NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE f = open_files(root + fn)[0]NEWLINE with f as f:NEWLINE data = f.read()NEWLINE assert data == open(os.path.join(dir_server, fn), "rb").read()NEWLINENEWLINENEWLINEdef test_loc(dir_server):NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE f = open_files(root + fn)[0]NEWLINE expected = open(os.path.join(dir_server, fn), "rb").read()NEWLINE with f as f:NEWLINE data = f.read(2)NEWLINE assert data == expected[:2]NEWLINE assert f.loc == 2NEWLINE f.seek(0)NEWLINE data = f.read(3)NEWLINE assert data == expected[:3]NEWLINE f.seek(1, 1)NEWLINE assert f.loc == 4NEWLINENEWLINENEWLINEdef test_fetch_range_with_headers(dir_server):NEWLINE # https://github.com/dask/dask/issues/4479NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE headers = {"Date": "Wed, 21 Oct 2015 07:28:00 GMT"}NEWLINE f = open_files(root + fn, headers=headers)[0]NEWLINE with f as f:NEWLINE data = f.read(length=1) + f.read(length=-1)NEWLINE assert data == open(os.path.join(dir_server, fn), "rb").read()NEWLINENEWLINENEWLINE@pytest.mark.parametrize("block_size", [None, 99999])NEWLINEdef test_ops(dir_server, block_size):NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE f = open_files(root + fn)[0]NEWLINE data = open(os.path.join(dir_server, fn), "rb").read()NEWLINE with f as f:NEWLINE # these pass because the default block size covers the whole fileNEWLINE assert f.read(10) == data[:10]NEWLINE f.seek(0)NEWLINE assert f.read(10) == data[:10]NEWLINE assert f.read(10) == data[10:20]NEWLINE f.seek(-10, 2)NEWLINE assert f.read() == data[-10:]NEWLINENEWLINENEWLINEdef test_ops_blocksize(dir_server):NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE f = open_files(root + fn, block_size=2)[0]NEWLINE data = open(os.path.join(dir_server, fn), "rb").read()NEWLINE with f as f:NEWLINE # it's OK to read the whole fileNEWLINE assert f.read() == dataNEWLINE # and now the file magically has a sizeNEWLINE assert f.size == len(data)NEWLINENEWLINE # note that if we reuse f from above, because it is tokenized, we getNEWLINE # the same open file - where is this cached?NEWLINE fn = files[1]NEWLINE f = open_files(root + fn, block_size=2)[0]NEWLINE with f as f:NEWLINE # fails because we want only 12 bytesNEWLINE with pytest.raises(ValueError):NEWLINE assert f.read(10) == data[:10]NEWLINENEWLINENEWLINEdef test_errors(dir_server):NEWLINE f = open_files("http://localhost:8999/doesnotexist")[0]NEWLINE with pytest.raises(errs):NEWLINE with f as f:NEWLINE f.read()NEWLINE f = open_files("http://nohost/")[0]NEWLINENEWLINE if FSSPEC_042:NEWLINE expected = FileNotFoundErrorNEWLINE else:NEWLINE expected = requests.exceptions.RequestExceptionNEWLINENEWLINE with pytest.raises(expected):NEWLINE with f as f:NEWLINE f.read()NEWLINE root = "http://localhost:8999/"NEWLINE fn = files[0]NEWLINE f = open_files(root + fn, mode="wb")[0]NEWLINE with pytest.raises(NotImplementedError):NEWLINE with f:NEWLINE passNEWLINE f = open_files(root + fn)[0]NEWLINE with f as f:NEWLINE with pytest.raises(ValueError):NEWLINE f.seek(-1)NEWLINENEWLINENEWLINEdef test_files(dir_server):NEWLINE root = "http://localhost:8999/"NEWLINE fs = open_files([root + f for f in files])NEWLINE for f, f2 in zip(fs, files):NEWLINE with f as f:NEWLINE assert f.read() == open(os.path.join(dir_server, f2), "rb").read()NEWLINENEWLINENEWLINEdef test_open_glob(dir_server):NEWLINE root = "http://localhost:8999/"NEWLINE fs = open_files(root + "/*")NEWLINE assert fs[0].path == "http://localhost:8999/a"NEWLINE assert fs[1].path == "http://localhost:8999/b"NEWLINENEWLINENEWLINE@pytest.mark.networkNEWLINE@pytest.mark.xfail(reason="https://github.com/dask/dask/issues/5042", strict=False)NEWLINEdef test_parquet():NEWLINE pytest.importorskip("requests", minversion="2.21.0")NEWLINE dd = pytest.importorskip("dask.dataframe")NEWLINE pytest.importorskip("fastparquet") # no pyarrow compatibility FS yetNEWLINE df = dd.read_parquet(NEWLINE [NEWLINE "https://github.com/Parquet/parquet-compatibility/raw/"NEWLINE "master/parquet-testdata/impala/1.1.1-NONE/"NEWLINE "nation.impala.parquet"NEWLINE ]NEWLINE ).compute()NEWLINE assert df.n_nationkey.tolist() == list(range(25))NEWLINE assert df.columns.tolist() == ["n_nationkey", "n_name", "n_regionkey", "n_comment"]NEWLINENEWLINENEWLINE@pytest.mark.xfail(reason="https://github.com/dask/dask/issues/3696", strict=False)NEWLINE@pytest.mark.networkNEWLINEdef test_bag():NEWLINE # This test pulls from different hostsNEWLINE db = pytest.importorskip("dask.bag")NEWLINE urls = [NEWLINE "https://raw.githubusercontent.com/weierophinney/pastebin/"NEWLINE "master/public/js-src/dojox/data/tests/stores/patterns.csv",NEWLINE "https://en.wikipedia.org",NEWLINE ]NEWLINE b = db.read_text(urls)NEWLINE assert b.npartitions == 2NEWLINE b.compute()NEWLINENEWLINENEWLINE@pytest.mark.skipif(NEWLINE LooseVersion(fsspec.__version__) <= "0.4.1",NEWLINE reason="https://github.com/dask/dask/pull/5231",NEWLINE)NEWLINE@pytest.mark.networkNEWLINEdef test_read_csv():NEWLINE dd = pytest.importorskip("dask.dataframe")NEWLINE url = (NEWLINE "https://raw.githubusercontent.com/weierophinney/pastebin/"NEWLINE "master/public/js-src/dojox/data/tests/stores/patterns.csv"NEWLINE )NEWLINE b = dd.read_csv(url)NEWLINE b.compute()NEWLINE |
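For reference, the core open_files pattern the tests above exercise; the URL is a placeholder, and block_size=None (the default) reads the file eagerly while a small block_size forces HTTP range requests.

from dask.bytes.core import open_files

def fetch(url, block_size=None):
    # open_files returns one lazy OpenFile per matched path
    [of] = open_files(url, block_size=block_size)
    with of as f:
        return f.read()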
#!/usr/bin/pythonNEWLINE#NEWLINE# pybuddyDXNEWLINE# python e-buddy (ibuddy alike sold on DX) daemonNEWLINE# http://code.google.com/p/pybuddyDXNEWLINE#NEWLINE# protocol reverse engineered and implemented byNEWLINE# [email protected]NEWLINE#NEWLINE# borrows code from http://code.google.com/p/pybuddyNEWLINE# by [email protected] and [email protected]NEWLINE# who got most of the code from http://cuntography.com/blog/?p=17NEWLINE# Which is based on http://scott.weston.id.au/software/pymissile/NEWLINENEWLINEimport usbNEWLINEimport timeNEWLINEimport sysNEWLINEimport socketNEWLINEimport osNEWLINEimport pwdNEWLINEimport loggingNEWLINEfrom ConfigParser import RawConfigParserNEWLINENEWLINE################NEWLINE#CommandsNEWLINE################NEWLINE# GLADNESS = 00NEWLINE# FEAR = 01NEWLINE# FIZZ = 02NEWLINE# PLEASANTSURPRISE = 03NEWLINE# GRIEF = 04NEWLINE# FURY = 05NEWLINE# QUELL = 06NEWLINE# REDHEAD = 07NEWLINE# GREENHEAD = 08NEWLINE# BLUEHEAD = 09NEWLINE# YELLOWHEAD = 10NEWLINE# BLAME = 11NEWLINE# BLUEGREENHEAD = 12NEWLINE# WHITEHEAD = 13NEWLINE# HEART = 14NEWLINE# WINGS = 15NEWLINE# BODY = 16NEWLINE# NOEFFECT = 17NEWLINE# ONLINE = 18NEWLINE# BUSY = 19NEWLINE# DAZE = 20NEWLINE# BACKSOON = 21NEWLINE# AWAY = 22NEWLINE# PHONE = 23NEWLINE# LUNCH = 24NEWLINE# OFFLINE = 25NEWLINENEWLINE################NEWLINE#ConfigurationNEWLINE################NEWLINEtsleep = 0.1NEWLINENEWLINENEWLINE################NEWLINE# IBUDDY classNEWLINE################NEWLINENEWLINEclass BuddyDevice:NEWLINE SETUP = (0x21, 0x09, 0x00, 0x02, 0x00, 0x00, 0x04, 0x00)NEWLINE MESS = (0x43, 0x4D)NEWLINE NEWLINE OFF1 = 0x31NEWLINE OFF2 = 0x37NEWLINENEWLINE code1 = OFF1NEWLINE code2 = OFF2NEWLINENEWLINE def __init__(self):NEWLINE try:NEWLINE self.dev=UsbDevice(0x0c45, 0x11)NEWLINE self.dev.open()NEWLINE self.dev.handle.reset()NEWLINE self.resetMessage()NEWLINE except NoBuddyException, e:NEWLINE raise NoBuddyException()NEWLINE NEWLINE def resetMessage(self):NEWLINE self.code1 = self.OFF1NEWLINE self.code2 = self.OFF2NEWLINE self.send()NEWLINENEWLINE def send(self):NEWLINE try:NEWLINE self.dev.handle.controlMsg(0x21, 0x09, self.SETUP, 0x0200, 0x00)NEWLINE self.dev.handle.controlMsg(0x21, 0x09, self.MESS+(self.code1,self.code2), 0x0200, 0x00)NEWLINE except usb.USBError:NEWLINE log.info("Error sending USB command")NEWLINE raise NoBuddyException()NEWLINENEWLINE#####################NEWLINE# USB classNEWLINE######################NEWLINENEWLINEclass UsbDevice:NEWLINE def __init__(self, vendor_id, product_id):NEWLINE busses = usb.busses()NEWLINE self.handle = NoneNEWLINE for bus in busses:NEWLINE devices = bus.devicesNEWLINE for dev in devices:NEWLINE if dev.idVendor==vendor_id and dev.idProduct==product_id:NEWLINE log.info("DX e-buddy found!")NEWLINE# log.info("vend %s prod %s",dev.idVendor, dev.idProduct)NEWLINE self.dev = devNEWLINE self.conf = self.dev.configurations[0]NEWLINE self.intf = self.conf.interfaces[0][0]NEWLINE self.endpoints = []NEWLINE# log.info("interface = %x, class = %s, protocol = %s", self.intf.interfaceNumber, self.intf.interfaceClass, self.intf.interfaceProtocol)NEWLINE for endpoint in self.intf.endpoints:NEWLINE self.endpoints.append(endpoint)NEWLINE# log.info("endpoint number = %x, type = %s", endpoint.address, endpoint.type)NEWLINE returnNEWLINE raise NoBuddyException()NEWLINENEWLINE def open(self):NEWLINE if self.handle:NEWLINE self.handle = NoneNEWLINE self.handle = self.dev.open()NEWLINENEWLINE# if self.handle:NEWLINE# log.info("Handle OK")NEWLINENEWLINE #We need to detach HID interfaceNEWLINE try:NEWLINE 
self.handle.detachKernelDriver(0)NEWLINE self.handle.detachKernelDriver(1)NEWLINE except:NEWLINE passNEWLINENEWLINE try:NEWLINE self.handle.setConfiguration(self.conf)NEWLINE self.handle.claimInterface(0)NEWLINE self.handle.setAltInterface(0)NEWLINE except:NEWLINE log.info("Configuration failed")NEWLINE raise NoBuddyException()NEWLINENEWLINE# log.info("Device opened OK")NEWLINENEWLINEclass NoBuddyException(Exception): passNEWLINENEWLINENEWLINE#########################################NEWLINE# Decoding macrosNEWLINE##########################################NEWLINENEWLINENEWLINEdef decode_buddy (buddy,msg):NEWLINE# log.info("Received message: %s",msg)NEWLINE buddy.code1 = int(msg)/10 + 0x30NEWLINE buddy.code2 = int(msg) - (int(msg)/10)*10 + 0x30NEWLINE# log.info("Codes: %x %x",buddy.code1,buddy.code2)NEWLINENEWLINE#######################################NEWLINE# MAIN programNEWLINE#######################################NEWLINENEWLINElog = logging.getLogger('pybuddy')NEWLINENEWLINE#Default configNEWLINEconfig = RawConfigParser(NEWLINE { 'port': 8888,NEWLINE 'address': '127.0.0.1',NEWLINE 'user': 'nobody',NEWLINE 'loglevel': 'info',NEWLINE 'logfile': 'console',NEWLINE }NEWLINE)NEWLINENEWLINEconfig._sections = {'network':{}, 'system':{}}NEWLINENEWLINEconfig_files = [ "~/.pybuddy.cfg", NEWLINE "/etc/pybuddy/pybuddy.cfg", NEWLINE "/usr/local/etc/pybuddy.cfg"NEWLINE]NEWLINENEWLINE#Parse configNEWLINEif len(sys.argv) > 1:NEWLINE config_files.append(sys.argv[1])NEWLINE NEWLINEconfig_read = config.read(config_files)NEWLINENEWLINEif config.get("system", "logfile") != "console":NEWLINE logging.basicConfig(NEWLINE filename=config.get("system", "logfile"),NEWLINE format='%(asctime)s %(levelname)-8s %(message)s',NEWLINE )NEWLINEelse:NEWLINE logging.basicConfig(NEWLINE stream=sys.stderr,NEWLINE format='%(asctime)s %(levelname)-8s %(message)s',NEWLINE )NEWLINENEWLINENEWLINEif config.get("system", "loglevel") == "debug":NEWLINE log.setLevel(logging.DEBUG)NEWLINEelif config.get("system", "loglevel") == "info":NEWLINE log.setLevel(logging.INFO)NEWLINENEWLINENEWLINEif config_read:NEWLINE log.info("Read config file: %s", config_read[0])NEWLINE NEWLINE#Initialize deviceNEWLINElog.info("Searching e-buddy...")NEWLINEtry:NEWLINE buddy=BuddyDevice()NEWLINEexcept NoBuddyException, e:NEWLINE log.error("Not found or ERROR!")NEWLINE sys.exit(1)NEWLINENEWLINENEWLINE#DaemonizeNEWLINElog.info("Starting daemon...")NEWLINEif os.fork()==0:NEWLINE os.setsid()NEWLINEelse:NEWLINE sys.exit(0)NEWLINENEWLINE#Create server socketNEWLINEs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)NEWLINEs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)NEWLINEs.bind((config.get("network", "address"), int(config.get("network", "port"))))NEWLINENEWLINE#Drop privilegesNEWLINEtry:NEWLINE uid = pwd.getpwnam(config.get("system", "user"))[2]NEWLINEexcept KeyError:NEWLINE log.error("Username %s not found, exiting...", config.get("system", "user"))NEWLINE sys.exit(1)NEWLINEos.setuid(uid)NEWLINENEWLINENEWLINE#Main message loopNEWLINEwhile 1:NEWLINE try:NEWLINE message, address = s.recvfrom(8192)NEWLINE# log.debug("Got data from %s", address)NEWLINE decode_buddy(buddy, message)NEWLINE buddy.send()NEWLINE except (KeyboardInterrupt, SystemExit):NEWLINE raiseNEWLINENEWLINE NEWLINE |
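Driving the daemon only takes a UDP datagram whose payload is one of the two-digit command codes from the table at the top of the file; the host and port below assume the default configuration values.

import socket

def send_buddy_command(code, host='127.0.0.1', port=8888):
    # e.g. send_buddy_command(14) lights up the HEART
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.sendto(str(code).encode(), (host, port))
    s.close()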
from rest_framework.status import HTTP_404_NOT_FOUNDNEWLINENEWLINENEWLINEERROR_GRID_DOES_NOT_EXIST = (NEWLINE "ERROR_GRID_DOES_NOT_EXIST",NEWLINE HTTP_404_NOT_FOUND,NEWLINE "The requested grid view does not exist.",NEWLINE)NEWLINE |
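A sketch of how an error triple in this (code, HTTP status, message) layout is typically unpacked; the helper function is hypothetical, and only the tuple structure comes from the constant above.

from rest_framework.response import Response

def as_error_response(error):
    error_code, status_code, message = error
    return Response({'error': error_code, 'detail': message}, status=status_code)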
# coding=utf-8NEWLINE# --------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root for license information.NEWLINE# Code generated by Microsoft (R) AutoRest Code Generator.NEWLINE# Changes may cause incorrect behavior and will be lost if the code is regenerated.NEWLINE# --------------------------------------------------------------------------NEWLINENEWLINEfrom typing import TYPE_CHECKINGNEWLINENEWLINEfrom azure.core.configuration import ConfigurationNEWLINEfrom azure.core.pipeline import policiesNEWLINEfrom azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicyNEWLINENEWLINEif TYPE_CHECKING:NEWLINE # pylint: disable=unused-import,ungrouped-importsNEWLINE from typing import AnyNEWLINENEWLINE from azure.core.credentials import AzureKeyCredentialNEWLINENEWLINEVERSION = "unknown"NEWLINENEWLINEclass MultiapiServiceClientConfiguration(Configuration):NEWLINE """Configuration for MultiapiServiceClient.NEWLINENEWLINE Note that all parameters used to create this instance are saved as instanceNEWLINE attributes.NEWLINENEWLINE :param credential: Credential needed for the client to connect to Azure.NEWLINE :type credential: ~azure.core.credentials.AzureKeyCredentialNEWLINE """NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE credential, # type: AzureKeyCredentialNEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> NoneNEWLINE if credential is None:NEWLINE raise ValueError("Parameter 'credential' must not be None.")NEWLINE super(MultiapiServiceClientConfiguration, self).__init__(**kwargs)NEWLINENEWLINE self.credential = credentialNEWLINE self.api_version = "3.0.0"NEWLINE kwargs.setdefault('sdk_moniker', 'multiapicredentialdefaultpolicy/{}'.format(VERSION))NEWLINE self._configure(**kwargs)NEWLINENEWLINE def _configure(NEWLINE self,NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) -> NoneNEWLINE self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)NEWLINE self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)NEWLINE self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)NEWLINE self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)NEWLINE self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)NEWLINE self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)NEWLINE self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)NEWLINE self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)NEWLINE self.authentication_policy = kwargs.get('authentication_policy')NEWLINE if self.credential and not self.authentication_policy:NEWLINE self.authentication_policy = policies.AzureKeyCredentialPolicy(self.credential, "api-key", **kwargs)NEWLINE |
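A minimal instantiation sketch for the configuration class above; AzureKeyCredential is the credential type its docstring names, and the key value is a placeholder.

from azure.core.credentials import AzureKeyCredential

config = MultiapiServiceClientConfiguration(AzureKeyCredential("<api-key>"))
assert config.api_version == "3.0.0"
# individual pipeline policies can be overridden via kwargs, e.g.
# MultiapiServiceClientConfiguration(credential, retry_policy=my_retry_policy)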
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables


class Registry(pulumi.CustomResource):
    admin_enabled: pulumi.Output[bool]
    """
    Specifies whether the admin user is enabled. Defaults to `false`.
    """
    admin_password: pulumi.Output[str]
    """
    The Password associated with the Container Registry Admin account - if the admin account is enabled.
    """
    admin_username: pulumi.Output[str]
    """
    The Username associated with the Container Registry Admin account - if the admin account is enabled.
    """
    georeplication_locations: pulumi.Output[list]
    """
    A list of Azure locations where the container registry should be geo-replicated.
    """
    location: pulumi.Output[str]
    """
    Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    """
    login_server: pulumi.Output[str]
    """
    The URL that can be used to log into the container registry.
    """
    name: pulumi.Output[str]
    """
    Specifies the name of the Container Registry. Changing this forces a new resource to be created.
    """
    network_rule_set: pulumi.Output[dict]
    """
    A `network_rule_set` block as documented below.

    * `default_action` (`str`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
    * `ip_rules` (`list`) - One or more `ip_rule` blocks as defined below.
      * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
      * `ipRange` (`str`) - The CIDR block from which requests will match the rule.
    * `virtualNetworks` (`list`) - One or more `virtual_network` blocks as defined below.
      * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
      * `subnet_id` (`str`) - The subnet id from which requests will match the rule.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
    """
    sku: pulumi.Output[str]
    """
    The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. `Classic` (which was previously `Basic`) is supported only for existing resources.
    """
    storage_account_id: pulumi.Output[str]
    """
    The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
    """
    tags: pulumi.Output[dict]
    """
    A mapping of tags to assign to the resource.
    """
    def __init__(__self__, resource_name, opts=None, admin_enabled=None, georeplication_locations=None, location=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account_id=None, tags=None, __props__=None, __name__=None, __opts__=None):
        """
        Manages an Azure Container Registry.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        rg = azure.core.ResourceGroup("rg", location="West US")
        acr = azure.containerservice.Registry("acr",
            resource_group_name=rg.name,
            location=rg.location,
            sku="Premium",
            admin_enabled=False,
            georeplication_locations=[
                "East US",
                "West Europe",
            ])
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.
        :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku: The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. `Classic` (which was previously `Basic`) is supported only for existing resources.
        :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.

        The **network_rule_set** object supports the following:

        * `default_action` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
        * `ip_rules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.
          * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
          * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.
        * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.
          * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
          * `subnet_id` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.
        """
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['admin_enabled'] = admin_enabled
            __props__['georeplication_locations'] = georeplication_locations
            __props__['location'] = location
            __props__['name'] = name
            __props__['network_rule_set'] = network_rule_set
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['sku'] = sku
            __props__['storage_account_id'] = storage_account_id
            __props__['tags'] = tags
            __props__['admin_password'] = None
            __props__['admin_username'] = None
            __props__['login_server'] = None
        super(Registry, __self__).__init__(
            'azure:containerservice/registry:Registry',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name, id, opts=None, admin_enabled=None, admin_password=None, admin_username=None, georeplication_locations=None, location=None, login_server=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account_id=None, tags=None):
        """
        Get an existing Registry resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param str id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.
        :param pulumi.Input[str] admin_password: The Password associated with the Container Registry Admin account - if the admin account is enabled.
        :param pulumi.Input[str] admin_username: The Username associated with the Container Registry Admin account - if the admin account is enabled.
        :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] login_server: The URL that can be used to log into the container registry.
        :param pulumi.Input[str] name: Specifies the name of the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku: The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. `Classic` (which was previously `Basic`) is supported only for existing resources.
        :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.

        The **network_rule_set** object supports the following:

        * `default_action` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`
        * `ip_rules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.
          * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
          * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.
        * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.
          * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`
          * `subnet_id` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["admin_enabled"] = admin_enabled
        __props__["admin_password"] = admin_password
        __props__["admin_username"] = admin_username
        __props__["georeplication_locations"] = georeplication_locations
        __props__["location"] = location
        __props__["login_server"] = login_server
        __props__["name"] = name
        __props__["network_rule_set"] = network_rule_set
        __props__["resource_group_name"] = resource_group_name
        __props__["sku"] = sku
        __props__["storage_account_id"] = storage_account_id
        __props__["tags"] = tags
        return Registry(resource_name, opts=opts, __props__=__props__)

    def translate_output_property(self, prop):
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
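# A minimal sketch of adopting an existing registry via Registry.get() inside a
# Pulumi program; the logical name and provider ID below are hypothetical
# placeholders.
existing = Registry.get(
    "existing-acr",
    id="<azure-resource-id-of-the-container-registry>",
)
pulumi.export("login_server", existing.login_server)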
"""!NEWLINE@brief Improved Experiment Argument Parser for SudoRmRfNEWLINENEWLINE@author Efthymios Tzinis {[email protected]}NEWLINE@copyright University of Illinois at Urbana-ChampaignNEWLINE"""NEWLINENEWLINEimport argparseNEWLINENEWLINENEWLINEdef get_args():NEWLINE """! Command line parser """NEWLINE parser = argparse.ArgumentParser(NEWLINE description='Experiment Argument Parser')NEWLINE # ===============================================NEWLINE # Datasets argumentsNEWLINE parser.add_argument("--train", type=str, nargs='+',NEWLINE help="Training dataset",NEWLINE default=None,NEWLINE choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])NEWLINE parser.add_argument("--val", type=str, nargs='+',NEWLINE help="Validation dataset",NEWLINE default=None,NEWLINE choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])NEWLINE parser.add_argument("--test", type=str, nargs='+',NEWLINE help="Test dataset",NEWLINE default=None,NEWLINE choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])NEWLINE parser.add_argument("--train_val", type=str, nargs='+',NEWLINE help="Validation on the training data",NEWLINE default=None,NEWLINE choices=['WHAM', 'LIBRI2MIX'])NEWLINE parser.add_argument("--n_train", type=int,NEWLINE help="""Reduce the number of training NEWLINE samples to this number.""", default=0)NEWLINE parser.add_argument("--n_val", type=int,NEWLINE help="""Reduce the number of evaluation NEWLINE samples to this number.""", default=0)NEWLINE parser.add_argument("--n_test", type=int,NEWLINE help="""Reduce the number of test NEWLINE samples to this number.""", default=0)NEWLINE parser.add_argument("--n_train_val", type=int,NEWLINE help="""Reduce the number of evaluation NEWLINE samples on the training set.""", default=0)NEWLINE parser.add_argument("--audio_timelength", type=float,NEWLINE help="""The timelength of the audio that you want NEWLINE to load in seconds.""",NEWLINE default=4.)NEWLINE parser.add_argument("--min_or_max", type=str,NEWLINE help="""Min or max if this applies to the dataset NEWLINE that you use. Min means that the mixture is going to NEWLINE be cropped at the minimum of all sources and for max NEWLINE is going to be zero-padded""",NEWLINE default='min',NEWLINE choices=['min', 'max'])NEWLINE parser.add_argument("--zero_pad_audio", action='store_true',NEWLINE help="""If a specific timelength is required all NEWLINE audio sources and mixtures are going to be zero NEWLINE padded in order to have the required length. 
If not NEWLINE and a specific timelegth is required then the files NEWLINE with less than required legth are not going to be NEWLINE used.""", default=False)NEWLINE parser.add_argument("--normalize_audio", action='store_true',NEWLINE help="""Normalize using mean and standard deviation NEWLINE before processing each audio file.""",NEWLINE default=False)NEWLINE # ===============================================NEWLINE # Separation task argumentsNEWLINE parser.add_argument("--n_channels", type=int,NEWLINE help="""The number of mixture channels.""",NEWLINE default=1, choices=[1, 2])NEWLINE parser.add_argument("--min_num_sources", type=int,NEWLINE help="""The minimum number of sources in a mixture.""",NEWLINE default=1)NEWLINE parser.add_argument("--max_num_sources", type=int,NEWLINE help="""The maximum number of sources in a mixture.""",NEWLINE default=4)NEWLINE parser.add_argument("--separation_task", type=str,NEWLINE help="The separation task you would like to perform, "NEWLINE "some of the tasks might not be available for "NEWLINE "specific datasets.",NEWLINE default=None,NEWLINE choices=['enhance_single_white_noise',NEWLINE 'enhance_single', 'enhance_both',NEWLINE 'sep_clean', 'sep_noisy'])NEWLINE # ===============================================NEWLINE # Training paramsNEWLINE parser.add_argument("-bs", "--batch_size", type=int,NEWLINE help="""The number of samples in each batch. NEWLINE Warning: Cannot be less than the number of NEWLINE the validation samples""", default=4)NEWLINE parser.add_argument("--n_epochs", type=int,NEWLINE help="""The number of epochs that the NEWLINE experiment should run""", default=500)NEWLINE parser.add_argument("-lr", "--learning_rate", type=float,NEWLINE help="""Initial Learning rate""", default=1e-3)NEWLINE parser.add_argument("--divide_lr_by", type=float,NEWLINE help="""The factor that the learning rate NEWLINE would be divided by""", default=3.)NEWLINE parser.add_argument("--patience", type=int,NEWLINE help="""Patience until reducing the learning rate .""",NEWLINE default=5)NEWLINE parser.add_argument("--optimizer", type=str,NEWLINE help="""The optimizer that you want to use""",NEWLINE default="adam",NEWLINE choices=['adam', 'radam'])NEWLINE parser.add_argument("--clip_grad_norm", type=float,NEWLINE help="""The norm value which all gradients NEWLINE are going to be clipped, 0 means that no NEWLINE grads are going to be clipped""",NEWLINE default=5.)NEWLINE parser.add_argument("-fs", type=int,NEWLINE help="""Sampling rate of the audio.""", default=8000)NEWLINE # ===============================================NEWLINE # CometML experiment configuration argumentsNEWLINE parser.add_argument("-tags", "--cometml_tags", type=str,NEWLINE nargs="+", help="""A list of tags for the cometml NEWLINE experiment.""",NEWLINE default=[])NEWLINE parser.add_argument("--experiment_name", type=str,NEWLINE help="""Name of current experiment""",NEWLINE default=None)NEWLINE parser.add_argument("--project_name", type=str,NEWLINE help="""Name of current experiment""",NEWLINE default="yolo_experiment")NEWLINE # ===============================================NEWLINE # Device paramsNEWLINE parser.add_argument("-cad", "--cuda_available_devices", type=str,NEWLINE nargs="+",NEWLINE help="""A list of Cuda IDs that would be NEWLINE available for running this experiment""",NEWLINE default=['0'],NEWLINE choices=['0', '1', '2', '3'])NEWLINE parser.add_argument("--n_jobs", type=int,NEWLINE help="""The number of cpu workers for NEWLINE loading the data, etc.""", default=4)NEWLINE # 
===============================================NEWLINE # Local experiment loggingNEWLINE parser.add_argument("-elp", "--experiment_logs_path", type=str,NEWLINE help="""Path for logging experiment's audio.""",NEWLINE default=None)NEWLINE parser.add_argument("-mlp", "--metrics_logs_path", type=str,NEWLINE help="""Path for logging metrics.""",NEWLINE default=None)NEWLINE parser.add_argument("-clp", "--checkpoints_path", type=str,NEWLINE help="""Path for logging checkpoints.""",NEWLINE default=None)NEWLINE parser.add_argument("--save_checkpoint_every", type=int,NEWLINE help="""Number of epochs between each model save.""",NEWLINE default=0)NEWLINE # ===============================================NEWLINE # Separation model (SuDO-RM-RF) paramsNEWLINE parser.add_argument("--out_channels", type=int,NEWLINE help="The number of channels of the internal "NEWLINE "representation outside the U-Blocks.",NEWLINE default=128)NEWLINE parser.add_argument("--in_channels", type=int,NEWLINE help="The number of channels of the internal "NEWLINE "representation inside the U-Blocks.",NEWLINE default=512)NEWLINE parser.add_argument("--num_blocks", type=int,NEWLINE help="Number of the successive U-Blocks.",NEWLINE default=16)NEWLINE parser.add_argument("--upsampling_depth", type=int,NEWLINE help="Number of successive upsamplings and "NEWLINE "effectively downsampling inside each U-Block. "NEWLINE "The aggregation of all scales is performed by "NEWLINE "addition.",NEWLINE default=5)NEWLINE parser.add_argument("--group_size", type=int,NEWLINE help="The number of individual computation groups "NEWLINE "applied if group communication module is used.",NEWLINE default=16)NEWLINE parser.add_argument("--enc_kernel_size", type=int,NEWLINE help="The width of the encoder and decoder kernels.",NEWLINE default=21)NEWLINE parser.add_argument("--enc_num_basis", type=int,NEWLINE help="Number of the encoded basis representations.",NEWLINE default=512)NEWLINENEWLINE # Attentive sudo parametersNEWLINE parser.add_argument("--att_dims", type=int,NEWLINE help="The number of attention depth.",NEWLINE default=256)NEWLINE parser.add_argument("--att_n_heads", type=int,NEWLINE help="The number of attention heads.",NEWLINE default=4)NEWLINE parser.add_argument("--att_dropout", type=float,NEWLINE help="The dropout rate inside the attention layers.",NEWLINE default=0.1)NEWLINENEWLINE parser.add_argument("--model_type", type=str,NEWLINE help="The type of model you would like to use.",NEWLINE default='relu',NEWLINE choices=['relu', 'softmax', 'groupcomm',NEWLINE 'groupcomm_v2', 'causal',NEWLINE 'attention', 'attention_v2',NEWLINE 'attention_v3', 'sepformer'])NEWLINENEWLINE return parser.parse_args() |
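# A minimal usage sketch: parse the command line and inspect a few of the
# resulting attributes; running with no flags simply returns the defaults
# defined above.
if __name__ == "__main__":
    args = get_args()
    print(args.batch_size, args.learning_rate, args.model_type)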
from __future__ import absolute_import, unicode_literals
import pytest
import logging
import os

from psd_tools.api import pil_io
from psd_tools.api.psd_image import PSDImage
from psd_tools.constants import ColorMode
from psd_tools.psd.patterns import Pattern
from ..utils import TEST_ROOT, full_name

logger = logging.getLogger(__name__)


@pytest.mark.parametrize(
    'mode', [
        'L',
        'LA',
        'RGB',
        'RGBA',
        'CMYK',
        'CMYKA',
        'LAB',
        '1',
    ]
)
def test_get_color_mode(mode):
    assert isinstance(pil_io.get_color_mode(mode), ColorMode)


@pytest.mark.parametrize(
    'mode, alpha, expected',
    [
        (ColorMode.BITMAP, False, '1'),
        (ColorMode.GRAYSCALE, False, 'L'),
        (ColorMode.GRAYSCALE, True, 'LA'),
        (ColorMode.RGB, False, 'RGB'),
        (ColorMode.RGB, True, 'RGBA'),
        (ColorMode.CMYK, False, 'CMYK'),
        (ColorMode.CMYK, True, 'CMYK'),  # CMYK with alpha is not supported.
        (ColorMode.LAB, False, 'LAB'),
    ]
)
def test_get_pil_mode(mode, alpha, expected):
    assert pil_io.get_pil_mode(mode, alpha) == expected


def test_convert_pattern_to_pil():
    filepath = os.path.join(TEST_ROOT, 'tagged_blocks', 'Patt_1.dat')
    with open(filepath, 'rb') as f:
        pattern = Pattern.read(f)

    assert pil_io.convert_pattern_to_pil(pattern)


def test_apply_icc_profile():
    filepath = full_name('colorprofiles/north_america_newspaper.psd')
    psd = PSDImage.open(filepath)
    no_icc = psd.topil(apply_icc=False)
    with_icc = psd.topil(apply_icc=True)
    assert no_icc.getextrema() != with_icc.getextrema()
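# A minimal sketch of the conversion path these tests exercise: open a PSD and
# convert it to a PIL image; 'example.psd' is a hypothetical placeholder path.
if __name__ == "__main__":
    psd = PSDImage.open('example.psd')
    image = psd.topil()
    print(image.mode, image.size)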
from IPython import get_ipython
from IPython.core.magic import (magics_class, line_magic)
from IPython.core.magics.osm import OSMagics
from johnstarich.ipython.shell import find_var
import keyword
import shutil


@magics_class
class Bashisms(OSMagics):
    @property
    def _exit_code(self) -> int:
        return self.shell.user_ns['_exit_code']

    @_exit_code.setter
    def _exit_code(self, value: int):
        self.shell.user_ns['_exit_code'] = value

    @line_magic
    def echo(self, line: str):
        "Simply print out its received arguments."
        print(line.format(**vars(), **globals()))
        self._exit_code = 0
        return

    @line_magic
    def cd(self, parameter_s=''):
        super(Bashisms, self).cd('-q ' + parameter_s)

    @line_magic
    def which(self, line):
        var_location = find_var(self.shell, line)
        if var_location is not None:
            print(var_location.get(line))
            self._exit_code = 0
            return

        if keyword.iskeyword(line):
            help(line)
            self._exit_code = 0
            return

        ex = shutil.which(line)
        if ex is not None:
            print(ex)
            self._exit_code = 0
            return
        else:
            print('"{}" could not be found on $PATH'
                  .format(line))
            self._exit_code = 1
            return


ip = get_ipython()
ip.register_magics(Bashisms)
del ip
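# A minimal usage sketch inside an IPython session, once this module has been
# imported and the magics registered above:
#
#   %echo hello          # prints "hello" and sets _exit_code to 0
#   %which python        # prints a variable location, keyword help, or a $PATH hit
#   %cd /tmp             # quiet cd, via the '-q' flag prepended above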
def troca(numeros):
    """Replace each positive entry of the list with 1 and the rest with 0."""
    for i in range(len(numeros)):
        if numeros[i] > 0:
            numeros[i] = 1
        else:
            numeros[i] = 0
    print(numeros)


numeros = []
for i in range(30):
    numeros.append(int(input("Digite um numero: ")))
print(numeros)
troca(numeros)
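# An equivalent sketch without in-place mutation: the same positive/non-positive
# mapping written as a list comprehension over the list read above.
binarizados = [1 if n > 0 else 0 for n in numeros]
print(binarizados)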
"""NEWLINESimple recurrent model - either with LSTM or GRU cells.NEWLINE"""NEWLINEfrom copy import copyNEWLINEfrom typing import Dict, List, Tuple, UnionNEWLINENEWLINEimport numpy as npNEWLINEimport torchNEWLINEimport torch.nn as nnNEWLINENEWLINEfrom pytorch_forecasting.data.encoders import MultiNormalizer, NaNLabelEncoderNEWLINEfrom pytorch_forecasting.data.timeseries import TimeSeriesDataSetNEWLINEfrom pytorch_forecasting.metrics import MAE, MAPE, MASE, RMSE, SMAPE, MultiHorizonMetric, MultiLoss, QuantileLossNEWLINEfrom pytorch_forecasting.models.base_model import AutoRegressiveBaseModelWithCovariatesNEWLINEfrom pytorch_forecasting.models.nn import HiddenState, MultiEmbedding, get_rnnNEWLINEfrom pytorch_forecasting.utils import apply_to_list, to_listNEWLINENEWLINENEWLINEclass RecurrentNetwork(AutoRegressiveBaseModelWithCovariates):NEWLINE def __init__(NEWLINE self,NEWLINE cell_type: str = "LSTM",NEWLINE hidden_size: int = 10,NEWLINE rnn_layers: int = 2,NEWLINE dropout: float = 0.1,NEWLINE static_categoricals: List[str] = [],NEWLINE static_reals: List[str] = [],NEWLINE time_varying_categoricals_encoder: List[str] = [],NEWLINE time_varying_categoricals_decoder: List[str] = [],NEWLINE categorical_groups: Dict[str, List[str]] = {},NEWLINE time_varying_reals_encoder: List[str] = [],NEWLINE time_varying_reals_decoder: List[str] = [],NEWLINE embedding_sizes: Dict[str, Tuple[int, int]] = {},NEWLINE embedding_paddings: List[str] = [],NEWLINE embedding_labels: Dict[str, np.ndarray] = {},NEWLINE x_reals: List[str] = [],NEWLINE x_categoricals: List[str] = [],NEWLINE output_size: Union[int, List[int]] = 1,NEWLINE target: Union[str, List[str]] = None,NEWLINE target_lags: Dict[str, List[int]] = {},NEWLINE loss: MultiHorizonMetric = None,NEWLINE logging_metrics: nn.ModuleList = None,NEWLINE **kwargs,NEWLINE ):NEWLINE """NEWLINE Recurrent Network.NEWLINENEWLINE Simple LSTM or GRU layer followed by output layerNEWLINENEWLINE Args:NEWLINE cell_type (str, optional): Recurrent cell type ["LSTM", "GRU"]. Defaults to "LSTM".NEWLINE hidden_size (int, optional): hidden recurrent size - the most important hyperparameter along withNEWLINE ``rnn_layers``. Defaults to 10.NEWLINE rnn_layers (int, optional): Number of RNN layers - important hyperparameter. Defaults to 2.NEWLINE dropout (float, optional): Dropout in RNN layers. 
Defaults to 0.1.NEWLINE static_categoricals: integer of positions of static categorical variablesNEWLINE static_reals: integer of positions of static continuous variablesNEWLINE time_varying_categoricals_encoder: integer of positions of categorical variables for encoderNEWLINE time_varying_categoricals_decoder: integer of positions of categorical variables for decoderNEWLINE time_varying_reals_encoder: integer of positions of continuous variables for encoderNEWLINE time_varying_reals_decoder: integer of positions of continuous variables for decoderNEWLINE categorical_groups: dictionary where valuesNEWLINE are list of categorical variables that are forming together a new categoricalNEWLINE variable which is the key in the dictionaryNEWLINE x_reals: order of continuous variables in tensor passed to forward functionNEWLINE x_categoricals: order of categorical variables in tensor passed to forward functionNEWLINE embedding_sizes: dictionary mapping (string) indices to tuple of number of categorical classes andNEWLINE embedding sizeNEWLINE embedding_paddings: list of indices for embeddings which transform the zero's embedding to a zero vectorNEWLINE embedding_labels: dictionary mapping (string) indices to list of categorical labelsNEWLINE output_size (Union[int, List[int]], optional): number of outputs (e.g. number of quantiles forNEWLINE QuantileLoss and one target or list of output sizes).NEWLINE target (str, optional): Target variable or list of target variables. Defaults to None.NEWLINE target_lags (Dict[str, Dict[str, int]]): dictionary of target names mapped to list of time steps byNEWLINE which the variable should be lagged.NEWLINE Lags can be useful to indicate seasonality to the models. If you know the seasonalit(ies) of your data,NEWLINE add at least the target variables with the corresponding lags to improve performance.NEWLINE Defaults to no lags, i.e. 
an empty dictionary.NEWLINE loss (MultiHorizonMetric, optional): loss: loss function taking prediction and targets.NEWLINE logging_metrics (nn.ModuleList, optional): Metrics to log during training.NEWLINE Defaults to nn.ModuleList([SMAPE(), MAE(), RMSE(), MAPE(), MASE()]).NEWLINE """NEWLINE if loss is None:NEWLINE loss = MAE()NEWLINE if logging_metrics is None:NEWLINE logging_metrics = nn.ModuleList([SMAPE(), MAE(), RMSE(), MAPE(), MASE()])NEWLINE self.save_hyperparameters()NEWLINE # store loss function separately as it is a moduleNEWLINE super().__init__(loss=loss, logging_metrics=logging_metrics, **kwargs)NEWLINENEWLINE self.embeddings = MultiEmbedding(NEWLINE embedding_sizes=embedding_sizes,NEWLINE embedding_paddings=embedding_paddings,NEWLINE categorical_groups=categorical_groups,NEWLINE x_categoricals=x_categoricals,NEWLINE )NEWLINENEWLINE lagged_target_names = [l for lags in target_lags.values() for l in lags]NEWLINE assert set(self.encoder_variables) - set(to_list(target)) - set(lagged_target_names) == set(NEWLINE self.decoder_variablesNEWLINE ), "Encoder and decoder variables have to be the same apart from target variable"NEWLINE for targeti in to_list(target):NEWLINE assert (NEWLINE targeti in time_varying_reals_encoderNEWLINE ), f"target {targeti} has to be real" # todo: remove this restrictionNEWLINE assert (isinstance(target, str) and isinstance(loss, MultiHorizonMetric)) or (NEWLINE isinstance(target, (list, tuple)) and isinstance(loss, MultiLoss) and len(loss) == len(target)NEWLINE ), "number of targets should be equivalent to number of loss metrics"NEWLINENEWLINE rnn_class = get_rnn(cell_type)NEWLINE cont_size = len(self.reals)NEWLINE cat_size = sum([size[1] for size in self.hparams.embedding_sizes.values()])NEWLINE input_size = cont_size + cat_sizeNEWLINE self.rnn = rnn_class(NEWLINE input_size=input_size,NEWLINE hidden_size=self.hparams.hidden_size,NEWLINE num_layers=self.hparams.rnn_layers,NEWLINE dropout=self.hparams.dropout if self.hparams.rnn_layers > 1 else 0,NEWLINE batch_first=True,NEWLINE )NEWLINENEWLINE # add linear layers for argument projectsNEWLINE if isinstance(target, str): # single targetNEWLINE self.output_projector = nn.Linear(self.hparams.hidden_size, self.hparams.output_size)NEWLINE assert not isinstance(self.loss, QuantileLoss), "QuantileLoss does not work with recurrent network"NEWLINE else: # multi targetNEWLINE self.output_projector = nn.ModuleList(NEWLINE [nn.Linear(self.hparams.hidden_size, size) for size in self.hparams.output_size]NEWLINE )NEWLINE for l in self.loss:NEWLINE assert not isinstance(l, QuantileLoss), "QuantileLoss does not work with recurrent network"NEWLINENEWLINE @classmethodNEWLINE def from_dataset(NEWLINE cls,NEWLINE dataset: TimeSeriesDataSet,NEWLINE allowed_encoder_known_variable_names: List[str] = None,NEWLINE **kwargs,NEWLINE ):NEWLINE """NEWLINE Create model from dataset.NEWLINENEWLINE Args:NEWLINE dataset: timeseries datasetNEWLINE allowed_encoder_known_variable_names: List of known variables that are allowed in encoder, defaults to allNEWLINE **kwargs: additional arguments such as hyperparameters for model (see ``__init__()``)NEWLINENEWLINE Returns:NEWLINE Recurrent networkNEWLINE """NEWLINE new_kwargs = copy(kwargs)NEWLINE new_kwargs.update(cls.deduce_default_output_parameters(dataset=dataset, kwargs=kwargs, default_loss=MAE()))NEWLINE assert not isinstance(dataset.target_normalizer, NaNLabelEncoder) and (NEWLINE not isinstance(dataset.target_normalizer, MultiNormalizer)NEWLINE or all([not isinstance(normalizer, 
NaNLabelEncoder) for normalizer in dataset.target_normalizer])NEWLINE ), "target(s) should be continuous - categorical targets are not supported" # todo: remove this restrictionNEWLINE return super().from_dataset(NEWLINE dataset, allowed_encoder_known_variable_names=allowed_encoder_known_variable_names, **new_kwargsNEWLINE )NEWLINENEWLINE def construct_input_vector(NEWLINE self, x_cat: torch.Tensor, x_cont: torch.Tensor, one_off_target: torch.Tensor = NoneNEWLINE ) -> torch.Tensor:NEWLINE """NEWLINE Create input vector into RNN networkNEWLINENEWLINE Args:NEWLINE one_off_target: tensor to insert into first position of target. If None (default), remove first time step.NEWLINE """NEWLINE # create input vectorNEWLINE if len(self.categoricals) > 0:NEWLINE embeddings = self.embeddings(x_cat)NEWLINE flat_embeddings = torch.cat([emb for emb in embeddings.values()], dim=-1)NEWLINE input_vector = flat_embeddingsNEWLINENEWLINE if len(self.reals) > 0:NEWLINE input_vector = x_contNEWLINENEWLINE if len(self.reals) > 0 and len(self.categoricals) > 0:NEWLINE input_vector = torch.cat([x_cont, flat_embeddings], dim=-1)NEWLINENEWLINE # shift target by oneNEWLINE input_vector[..., self.target_positions] = torch.roll(NEWLINE input_vector[..., self.target_positions], shifts=1, dims=1NEWLINE )NEWLINENEWLINE if one_off_target is not None: # set first target input (which is rolled over)NEWLINE input_vector[:, 0, self.target_positions] = one_off_targetNEWLINE else:NEWLINE input_vector = input_vector[:, 1:]NEWLINENEWLINE # shift targetNEWLINE return input_vectorNEWLINENEWLINE def encode(self, x: Dict[str, torch.Tensor]) -> HiddenState:NEWLINE """NEWLINE Encode sequence into hidden stateNEWLINE """NEWLINE # encode using rnnNEWLINE assert x["encoder_lengths"].min() > 0NEWLINE encoder_lengths = x["encoder_lengths"] - 1NEWLINE input_vector = self.construct_input_vector(x["encoder_cat"], x["encoder_cont"])NEWLINE _, hidden_state = self.rnn(NEWLINE input_vector, lengths=encoder_lengths, enforce_sorted=FalseNEWLINE ) # second ouput is not needed (hidden state)NEWLINE return hidden_stateNEWLINENEWLINE def decode_all(NEWLINE self,NEWLINE x: torch.Tensor,NEWLINE hidden_state: HiddenState,NEWLINE lengths: torch.Tensor = None,NEWLINE ):NEWLINE decoder_output, hidden_state = self.rnn(x, hidden_state, lengths=lengths, enforce_sorted=False)NEWLINE if isinstance(self.hparams.target, str): # single targetNEWLINE output = self.output_projector(decoder_output)NEWLINE else:NEWLINE output = [projector(decoder_output) for projector in self.output_projector]NEWLINE return output, hidden_stateNEWLINENEWLINE def decode(NEWLINE self,NEWLINE input_vector: torch.Tensor,NEWLINE target_scale: torch.Tensor,NEWLINE decoder_lengths: torch.Tensor,NEWLINE hidden_state: HiddenState,NEWLINE n_samples: int = None,NEWLINE ) -> Tuple[torch.Tensor, bool]:NEWLINE """NEWLINE Decode hidden state of RNN into prediction. If n_smaples is given,NEWLINE decode not by using actual values but rather byNEWLINE sampling new targets from past predictions iterativelyNEWLINE """NEWLINE if self.training:NEWLINE output, _ = self.decode_all(input_vector, hidden_state, lengths=decoder_lengths)NEWLINE output = self.transform_output(output, target_scale=target_scale)NEWLINE else:NEWLINE # run in eval, i.e. 
simulation modeNEWLINE target_pos = self.target_positionsNEWLINE lagged_target_positions = self.lagged_target_positionsNEWLINENEWLINE # define function to run at every decoding stepNEWLINE def decode_one(NEWLINE idx,NEWLINE lagged_targets,NEWLINE hidden_state,NEWLINE ):NEWLINE x = input_vector[:, [idx]]NEWLINE x[:, 0, target_pos] = lagged_targets[-1]NEWLINE for lag, lag_positions in lagged_target_positions.items():NEWLINE if idx > lag:NEWLINE x[:, 0, lag_positions] = lagged_targets[-lag]NEWLINE prediction, hidden_state = self.decode_all(x, hidden_state)NEWLINE prediction = apply_to_list(prediction, lambda x: x[:, 0]) # select first time stepNEWLINE return prediction, hidden_stateNEWLINENEWLINE # make predictions which are fed into next stepNEWLINE output = self.decode_autoregressive(NEWLINE decode_one,NEWLINE first_target=input_vector[:, 0, target_pos],NEWLINE first_hidden_state=hidden_state,NEWLINE target_scale=target_scale,NEWLINE n_decoder_steps=input_vector.size(1),NEWLINE )NEWLINE return outputNEWLINENEWLINE def forward(self, x: Dict[str, torch.Tensor], n_samples: int = None) -> Dict[str, torch.Tensor]:NEWLINE """NEWLINE Forward networkNEWLINE """NEWLINE hidden_state = self.encode(x)NEWLINE # decodeNEWLINE input_vector = self.construct_input_vector(NEWLINE x["decoder_cat"],NEWLINE x["decoder_cont"],NEWLINE one_off_target=x["encoder_cont"][NEWLINE torch.arange(x["encoder_cont"].size(0), device=x["encoder_cont"].device),NEWLINE x["encoder_lengths"] - 1,NEWLINE self.target_positions.unsqueeze(-1),NEWLINE ].T,NEWLINE )NEWLINENEWLINE output = self.decode(NEWLINE input_vector,NEWLINE decoder_lengths=x["decoder_lengths"],NEWLINE target_scale=x["target_scale"],NEWLINE hidden_state=hidden_state,NEWLINE )NEWLINE # return relevant partNEWLINE return self.to_network_output(prediction=output)NEWLINE |
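# A minimal usage sketch, assuming `training` is an existing TimeSeriesDataSet
# with continuous target(s); the hyperparameters mirror the defaults above.
model = RecurrentNetwork.from_dataset(
    training,
    cell_type="LSTM",
    hidden_size=10,
    rnn_layers=2,
    dropout=0.1,
)
print(sum(p.numel() for p in model.parameters()))  # total parameter count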
"""Resnet v1 model variants.NEWLINECode branched out from slim/nets/resnet_v1.py, and please refer to it forNEWLINEmore details.NEWLINEThe original version ResNets-v1 were proposed by:NEWLINE[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian SunNEWLINE Deep Residual Learning for Image Recognition. arXiv:1512.03385NEWLINE"""NEWLINEfrom __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport functoolsNEWLINEimport tensorflow as tfNEWLINENEWLINEfrom models import resnet_utilsNEWLINEfrom utils.metrics import *NEWLINEfrom utils.loss import *NEWLINEimport warningsNEWLINEwarnings.filterwarnings('ignore')NEWLINEfrom tensorflow.contrib import layersNEWLINEfrom tensorflow.contrib.framework.python.ops import add_arg_scopeNEWLINEfrom tensorflow.contrib.framework.python.ops import arg_scopeNEWLINEfrom tensorflow.contrib.layers.python.layers import utilsNEWLINEfrom tensorflow.contrib.layers.python.layers import regularizersNEWLINENEWLINE_DEFAULT_MULTI_GRID = [1, 1, 1]NEWLINENEWLINEdef update_argparser(parser):NEWLINE parser.set_defaults(NEWLINE train_steps=40000,NEWLINE learning_rate=((20000,30000), (0.0001, 0.00001,0.000001)),NEWLINE save_checkpoints_steps=200,NEWLINE )NEWLINENEWLINENEWLINE@add_arg_scopeNEWLINEdef bottleneck(inputs,NEWLINE depth,NEWLINE depth_bottleneck,NEWLINE stride,NEWLINE unit_rate=1,NEWLINE rate=1,NEWLINE outputs_collections=None,NEWLINE scope=None):NEWLINE """Bottleneck residual unit variant with BN after convolutions.NEWLINE This is the original residual unit proposed in [1]. See Fig. 1(a) of [2] forNEWLINE its definition. Note that we use here the bottleneck variant which has anNEWLINE extra bottleneck layer.NEWLINE When putting together two consecutive ResNet blocks that use this unit, oneNEWLINE should use stride = 2 in the last unit of the first block.NEWLINE Args:NEWLINE inputs: A tensor of size [batch, height, width, channels].NEWLINE depth: The depth of the ResNet unit output.NEWLINE depth_bottleneck: The depth of the bottleneck layers.NEWLINE stride: The ResNet unit's stride. 
Determines the amount of downsampling ofNEWLINE the units output compared to its input.NEWLINE unit_rate: An integer, unit rate for atrous convolution.NEWLINE rate: An integer, rate for atrous convolution.NEWLINE outputs_collections: Collection to add the ResNet unit output.NEWLINE scope: Optional variable_scope.NEWLINE Returns:NEWLINE The ResNet unit's output.NEWLINE """NEWLINE with tf.variable_scope(scope, 'bottleneck_v1', [inputs]) as sc:NEWLINE depth_in = utils.last_dimension(inputs.get_shape(), min_rank=4)NEWLINE if depth == depth_in:NEWLINE shortcut = resnet_utils.subsample(inputs, stride, 'shortcut')NEWLINE else:NEWLINE shortcut = layers.conv2d(NEWLINE inputs,NEWLINE depth,NEWLINE [1, 1],NEWLINE stride=stride,NEWLINE activation_fn=None,NEWLINE scope='shortcut')NEWLINENEWLINE residual = layers.conv2d(inputs, depth_bottleneck, [1, 1], stride=1,NEWLINE scope='conv1')NEWLINE residual = resnet_utils.conv2d_same(residual, depth_bottleneck, 3, stride,NEWLINE rate=rate*unit_rate, scope='conv2')NEWLINE residual = layers.conv2d(residual, depth, [1, 1], stride=1,NEWLINE activation_fn=None, scope='conv3')NEWLINE output = tf.nn.relu(shortcut + residual)NEWLINENEWLINE return utils.collect_named_outputs(outputs_collections,NEWLINE sc.name,NEWLINE output)NEWLINENEWLINENEWLINEdef root_block_fn_for_beta_variant(net):NEWLINE """Gets root_block_fn for beta variant.NEWLINE ResNet-v1 beta variant modifies the first original 7x7 convolution to threeNEWLINE 3x3 convolutions.NEWLINE Args:NEWLINE net: A tensor of size [batch, height, width, channels], input to the model.NEWLINE Returns:NEWLINE A tensor after three 3x3 convolutions.NEWLINE """NEWLINE net = resnet_utils.conv2d_same(net, 64, 3, stride=2, scope='conv1_1')NEWLINE net = resnet_utils.conv2d_same(net, 64, 3, stride=1, scope='conv1_2')NEWLINE net = resnet_utils.conv2d_same(net, 128, 3, stride=1, scope='conv1_3')NEWLINENEWLINE return netNEWLINENEWLINENEWLINEdef resnet_v1_beta(inputs,NEWLINE blocks,NEWLINE num_classes=None,NEWLINE is_training=None,NEWLINE global_pool=True,NEWLINE output_stride=None,NEWLINE root_block_fn=None,NEWLINE scope=None):NEWLINE """Generator for v1 ResNet models (beta variant).NEWLINE This function generates a family of modified ResNet v1 models. In particular,NEWLINE the first original 7x7 convolution is replaced with three 3x3 convolutions.NEWLINE See the resnet_v1_*() methods for specific model instantiations, obtained byNEWLINE selecting different block instantiations that produce ResNets of variousNEWLINE depths.NEWLINE The code is modified from slim/nets/resnet_v1.py, and please refer to it forNEWLINE more details.NEWLINE Args:NEWLINE inputs: A tensor of size [batch, height_in, width_in, channels].NEWLINE blocks: A list of length equal to the number of ResNet blocks. Each elementNEWLINE is a resnet_utils.Block object describing the units in the block.NEWLINE num_classes: Number of predicted classes for classification tasks. If NoneNEWLINE we return the features before the logit layer.NEWLINE is_training: Enable/disable is_training for batch normalization.NEWLINE global_pool: If True, we perform global average pooling before computing theNEWLINE logits. Set to True for image classification, False for dense prediction.NEWLINE output_stride: If None, then the output will be computed at the nominalNEWLINE network stride. 
If output_stride is not None, it specifies the requestedNEWLINE ratio of input to output spatial resolution.NEWLINE root_block_fn: The function consisting of convolution operations applied toNEWLINE the root input. If root_block_fn is None, use the original setting ofNEWLINE RseNet-v1, which is simply one convolution with 7x7 kernel and stride=2.NEWLINE reuse: whether or not the network and its variables should be reused. To beNEWLINE able to reuse 'scope' must be given.NEWLINE scope: Optional variable_scope.NEWLINE Returns:NEWLINE net: A rank-4 tensor of size [batch, height_out, width_out, channels_out].NEWLINE If global_pool is False, then height_out and width_out are reduced by aNEWLINE factor of output_stride compared to the respective height_in and width_in,NEWLINE else both height_out and width_out equal one. If num_classes is None, thenNEWLINE net is the output of the last ResNet block, potentially after globalNEWLINE average pooling. If num_classes is not None, net contains the pre-softmaxNEWLINE activations.NEWLINE end_points: A dictionary from components of the network to the correspondingNEWLINE activation.NEWLINE Raises:NEWLINE ValueError: If the target output_stride is not valid.NEWLINE """NEWLINE if root_block_fn is None:NEWLINE root_block_fn = functools.partial(resnet_utils.conv2d_same,NEWLINE num_outputs=64,NEWLINE kernel_size=7,NEWLINE stride=2,NEWLINE scope='conv1')NEWLINE with tf.variable_scope(scope, 'resnet_v1', [inputs]) as sc:NEWLINE end_points_collection = sc.original_name_scope + '_end_points'NEWLINE with arg_scope([layers.conv2d, bottleneck,NEWLINE resnet_utils.stack_blocks_dense],NEWLINE outputs_collections=end_points_collection):NEWLINE if is_training is not None:NEWLINE arg_sc = arg_scope([layers.batch_norm], is_training=is_training)NEWLINE else:NEWLINE arg_sc = arg_scope([])NEWLINE with arg_sc:NEWLINE net = inputsNEWLINE if output_stride is not None:NEWLINE if output_stride % 4 != 0:NEWLINE raise ValueError('The output_stride needs to be a multiple of 4.')NEWLINE output_stride /= 4NEWLINE print(str(output_stride) + 'Before resnet blocks')NEWLINE net = root_block_fn(net)NEWLINE net = layers.max_pool2d(net, 3, stride=2, padding='SAME', scope='pool1')NEWLINE net = resnet_utils.stack_blocks_dense(net, blocks, output_stride)NEWLINENEWLINE if global_pool:NEWLINE # Global average pooling.NEWLINE net = tf.reduce_mean(net, [1, 2], name='pool5', keepdims=True)NEWLINE if num_classes is not None:NEWLINE net = layers.conv2d(net, num_classes, [1, 1], activation_fn=None,NEWLINE normalizer_fn=None, scope='logit')NEWLINE # Convert end_points_collection into a dictionary of end_points.NEWLINE end_points = utils.convert_collection_to_dict(end_points_collection)NEWLINE if num_classes is not None:NEWLINE end_points['predictions'] = layers.softmax(net, scope='predictions')NEWLINE return net, end_pointsNEWLINENEWLINENEWLINEdef resnet_v1_beta_block(scope, base_depth, num_units, stride):NEWLINE """Helper function for creating a resnet_v1 beta variant bottleneck block.NEWLINE Args:NEWLINE scope: The scope of the block.NEWLINE base_depth: The depth of the bottleneck layer for each unit.NEWLINE num_units: The number of units in the block.NEWLINE stride: The stride of the block, implemented as a stride in the last unit.NEWLINE All other units have stride=1.NEWLINE Returns:NEWLINE A resnet_v1 bottleneck block.NEWLINE """NEWLINE return resnet_utils.Block(scope, bottleneck, [{NEWLINE 'depth': base_depth * 4,NEWLINE 'depth_bottleneck': base_depth,NEWLINE 'stride': 1,NEWLINE 'unit_rate': 
1NEWLINE }] * (num_units - 1) + [{NEWLINE 'depth': base_depth * 4,NEWLINE 'depth_bottleneck': base_depth,NEWLINE 'stride': stride,NEWLINE 'unit_rate': 1NEWLINE }])NEWLINENEWLINEdef resnet_v1_101_beta(inputs,NEWLINE num_classes=None,NEWLINE is_training=None,NEWLINE global_pool=False,NEWLINE output_stride=None,NEWLINE multi_grid=None,NEWLINE scope='resnet_v1_101'):NEWLINE """Resnet v1 101 beta variant.NEWLINE This variant modifies the first convolution layer of ResNet-v1-101. InNEWLINE particular, it changes the original one 7x7 convolution to three 3x3NEWLINE convolutions.NEWLINE Args:NEWLINE inputs: A tensor of size [batch, height_in, width_in, channels].NEWLINE num_classes: Number of predicted classes for classification tasks. If NoneNEWLINE we return the features before the logit layer.NEWLINE is_training: Enable/disable is_training for batch normalization.NEWLINE global_pool: If True, we perform global average pooling before computing theNEWLINE logits. Set to True for image classification, False for dense prediction.NEWLINE output_stride: If None, then the output will be computed at the nominalNEWLINE network stride. If output_stride is not None, it specifies the requestedNEWLINE ratio of input to output spatial resolution.NEWLINE multi_grid: Employ a hierarchy of different atrous rates within network.NEWLINE reuse: whether or not the network and its variables should be reused. To beNEWLINE able to reuse 'scope' must be given.NEWLINE scope: Optional variable_scope.NEWLINE Returns:NEWLINE net: A rank-4 tensor of size [batch, height_out, width_out, channels_out].NEWLINE If global_pool is False, then height_out and width_out are reduced by aNEWLINE factor of output_stride compared to the respective height_in and width_in,NEWLINE else both height_out and width_out equal one. If num_classes is None, thenNEWLINE net is the output of the last ResNet block, potentially after globalNEWLINE average pooling. 
If num_classes is not None, net contains the pre-softmaxNEWLINE activations.NEWLINE end_points: A dictionary from components of the network to the correspondingNEWLINE activation.NEWLINE Raises:NEWLINE ValueError: if multi_grid is not None and does not have length = 3.NEWLINE """NEWLINE if multi_grid is None:NEWLINE multi_grid = _DEFAULT_MULTI_GRIDNEWLINE else:NEWLINE if len(multi_grid) != 3:NEWLINE raise ValueError('Expect multi_grid to have length 3.')NEWLINENEWLINE blocks = [NEWLINE resnet_v1_beta_block(NEWLINE 'block1', base_depth=64, num_units=3, stride=2),NEWLINE resnet_v1_beta_block(NEWLINE 'block2', base_depth=128, num_units=4, stride=2),NEWLINE resnet_v1_beta_block(NEWLINE 'block3', base_depth=256, num_units=23, stride=2),NEWLINE resnet_utils.Block('block4', bottleneck, [NEWLINE {'depth': 2048,NEWLINE 'depth_bottleneck': 512,NEWLINE 'stride': 1,NEWLINE 'unit_rate': rate} for rate in multi_grid]),NEWLINE ]NEWLINE return resnet_v1_beta(NEWLINE inputs,NEWLINE blocks=blocks,NEWLINE num_classes=num_classes,NEWLINE is_training=is_training,NEWLINE global_pool=global_pool,NEWLINE output_stride=output_stride,NEWLINE root_block_fn=functools.partial(root_block_fn_for_beta_variant),NEWLINE scope=scope)NEWLINENEWLINEdef atrous_spatial_pyramid_pooling(net, scope, output_stride, is_training, weight_decay, depth=256):NEWLINE """NEWLINE ASPP consists of (a) one 1×1 convolution and three 3×3 convolutions with rates = (6, 12, 18) when output stride = 16NEWLINE when output stride = 8, rates are doubledNEWLINE (all with 256 filters and batch normalization), and (b) the image-level features as described in https://arxiv.org/abs/1706.05587NEWLINE :param net: tensor of shape [BATCH_SIZE, WIDTH, HEIGHT, DEPTH]NEWLINE :param scope: scope name of the aspp layerNEWLINE :return: network layer with aspp applyed to it.NEWLINE """NEWLINE if output_stride == 16:NEWLINE rates = [6,12,18]NEWLINE elif output_stride == 8:NEWLINE rates = [12,24,36]NEWLINENEWLINE with tf.variable_scope(scope):NEWLINE batch_norm_params = {NEWLINE 'is_training': is_training,NEWLINE 'decay': 0.9997,NEWLINE 'epsilon': 1e-5,NEWLINE 'scale': True,NEWLINE }NEWLINENEWLINE with arg_scope(NEWLINE [layers.conv2d],NEWLINE # comment next line of code if multiple gpus are usedNEWLINE weights_regularizer=regularizers.l2_regularizer(weight_decay),NEWLINE activation_fn=tf.nn.relu,NEWLINE normalizer_fn=layers.batch_norm,NEWLINE normalizer_params=batch_norm_params):NEWLINE NEWLINE with arg_scope([layers.batch_norm], **batch_norm_params):NEWLINENEWLINE feature_map_size = tf.shape(net)NEWLINE # apply global average poolingNEWLINE image_level_features = tf.reduce_mean(net, [1, 2], name='image_level_global_pool', keepdims=True)NEWLINE image_level_features = layers.conv2d(image_level_features, depth, [1, 1], scope="image_level_conv_1x1",NEWLINE activation_fn=None)NEWLINE image_level_features = tf.image.resize_bilinear(image_level_features, (feature_map_size[1], feature_map_size[2]))NEWLINENEWLINE at_pool1x1 = layers.conv2d(net, depth, [1, 1], scope="conv_1x1_0", activation_fn=None)NEWLINENEWLINE at_pool3x3_1 = layers.conv2d(net, depth, [3, 3], scope="conv_3x3_1", rate=rates[0], activation_fn=None)NEWLINENEWLINE at_pool3x3_2 = layers.conv2d(net, depth, [3, 3], scope="conv_3x3_2", rate=rates[1], activation_fn=None)NEWLINENEWLINE at_pool3x3_3 = layers.conv2d(net, depth, [3, 3], scope="conv_3x3_3", rate=rates[2], activation_fn=None)NEWLINENEWLINE net = tf.concat((image_level_features, at_pool1x1, at_pool3x3_1, at_pool3x3_2, at_pool3x3_3), axis=3,NEWLINE 
name="concat")NEWLINE net = layers.conv2d(net, depth, [1, 1], scope="conv_1x1_output", activation_fn=None)NEWLINE net = layers.dropout(net, keep_prob=0.9, is_training=is_training, scope="dropout")NEWLINE return netNEWLINENEWLINE#用@add_arg_scope修饰目标函数NEWLINE#用with arg_scope(...) 设置默认参数.NEWLINEdef deeplab_v3(inputs, args, is_training, output_stride):NEWLINENEWLINE # inputs has shape - Original: [batch, 513, 513, 3]NEWLINE with arg_scope(resnet_utils.resnet_arg_scope(args.l2_regularizer, is_training)):NEWLINE _, end_points = resnet_v1_101_beta(inputs,NEWLINE args.num_classes,NEWLINE is_training=is_training,NEWLINE global_pool=False,NEWLINE output_stride=output_stride,NEWLINE multi_grid=args.multi_grid)NEWLINENEWLINE with tf.variable_scope("DeepLab_v3"):NEWLINENEWLINE # get block 4 feature outputsNEWLINE net = end_points[args.resnet_model + '/block4']NEWLINENEWLINE net = atrous_spatial_pyramid_pooling(net, "ASPP_layer", output_stride, is_training, args.l2_regularizer, depth=256)NEWLINENEWLINE net = layers.conv2d(net, args.num_classes, [1, 1], activation_fn=None,NEWLINE normalizer_fn=None, scope='logits')NEWLINENEWLINE size = tf.shape(inputs)[1:3]NEWLINE # resize the output logits to match the labels dimensionsNEWLINE net = tf.image.resize_bilinear(net, size)NEWLINE return netNEWLINENEWLINEdef model_fn(features, labels, mode, params):NEWLINE ''' Model function'''NEWLINENEWLINE output_stride = NoneNEWLINENEWLINE if mode == tf.estimator.ModeKeys.TRAIN:NEWLINE train = TrueNEWLINE output_stride = params.train_output_strideNEWLINE else:NEWLINE train = FalseNEWLINE output_stride = params.eval_output_strideNEWLINE NEWLINE img_input = tf.reshape(features, [-1, params.crop_size, params.crop_size, 3])NEWLINENEWLINE # Create networkNEWLINE raw_output = deeplab_v3(img_input, params, train, output_stride)NEWLINENEWLINE predictions = tf.argmax(raw_output, axis=-1)NEWLINENEWLINE # Setup the estimator according to the phase (Train, eval)NEWLINE reduced_loss = NoneNEWLINE train_op = NoneNEWLINE eval_metric_ops = {}NEWLINENEWLINE # compute loss(train and eval)NEWLINE loss = softmax_sparse_crossentropy_ignoring_last_label(labels,raw_output)NEWLINENEWLINE # L2 regularizationNEWLINE l2_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)NEWLINENEWLINE # Trainable VariablesNEWLINE #all_trainable = tf.trainable_variables()NEWLINE # L2 regularizationNEWLINE #l2_losses = [params.l2_regularizer * tf.nn.l2_loss(v) for v in all_trainable if 'weights' in v.name]NEWLINENEWLINE # Loss functionNEWLINE reduced_loss = tf.reduce_mean(loss) + tf.add_n(l2_losses)NEWLINENEWLINENEWLINE # evaluation metricNEWLINE miou, update_op = mIOU(raw_output,labels,params.num_classes,img_input)NEWLINENEWLINENEWLINE # configure trainingNEWLINE if mode == tf.estimator.ModeKeys.TRAIN:NEWLINE # piecewise learning rate schedulerNEWLINE global_step = tf.train.get_or_create_global_step()NEWLINE learning_rate = tf.train.piecewise_constant(global_step, params.learning_rate[0], params.learning_rate[1])NEWLINENEWLINE '''NEWLINE # learning rate schedulerNEWLINE global_step = tf.train.get_or_create_global_step()NEWLINE starter_learning_rate = 0.0001NEWLINE end_learning_rate = 0NEWLINE decay_steps = params.train_stepsNEWLINE learning_rate = tf.train.polynomial_decay(starter_learning_rate, global_step,NEWLINE decay_steps, end_learning_rate,NEWLINE power=0.9)NEWLINE '''NEWLINE NEWLINE # SGD + momentum optimizerNEWLINE optimizer = tf.train.MomentumOptimizer(learning_rate,momentum = 0.9)NEWLINE # comment out next two lines if batch norm is frozenNEWLINE 
# NOTE still need this because aspp needs batch normNEWLINE update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)NEWLINE with tf.control_dependencies(update_ops):NEWLINE train_op = optimizer.minimize(reduced_loss, global_step=tf.train.get_or_create_global_step())NEWLINENEWLINE if mode == tf.estimator.ModeKeys.EVAL:NEWLINE eval_metric_ops = {NEWLINE 'miou': (miou, update_op)NEWLINE }NEWLINENEWLINE return tf.estimator.EstimatorSpec(NEWLINE mode=mode,NEWLINE predictions=predictions,NEWLINE loss=reduced_loss,NEWLINE train_op=train_op,NEWLINE eval_metric_ops=eval_metric_ops,NEWLINE export_outputs=None,NEWLINE )NEWLINE |
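# A minimal sketch of wiring model_fn into a TF1 Estimator; `hparams` is a
# hypothetical namespace carrying the attributes model_fn reads (crop_size,
# num_classes, multi_grid, resnet_model, l2_regularizer, learning_rate,
# train_output_stride, eval_output_stride), and the model_dir is a placeholder.
def make_estimator(hparams, model_dir='/tmp/deeplab_v3'):
  return tf.estimator.Estimator(model_fn=model_fn, model_dir=model_dir, params=hparams)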
# TRANSMITTER HAS A DIRECTIONAL ANTENNA - POINTED IN 12 DIFFERENT POSESNEWLINE# RECEIVER HAS AN OMNI DIRECTIONAL ANTENNANEWLINE# DISTANCE BETWEEN RECEIVER AND TRANSMITTER - (5, 10, 15) FEETNEWLINE# IMPEMENTING HIERARCHICAL MACHINE LEARNINGNEWLINE# IMPLEMENTING TRANSFER LEARNINGNEWLINE# DATA COLLECTED IN INDOOR ENVIRONMENTNEWLINENEWLINE#############################################################NEWLINE# Pose Estimation and Ranging the RF Transmitter #NEWLINE# Neural Network for Direction Finding Data 2020 #NEWLINE# Author: Debashri Roy #NEWLINE#############################################################NEWLINENEWLINE############ IMPORTING NECESSARY PACKAGES ################NEWLINEimport numpy as np # Package for numerical computationNEWLINEnp.set_printoptions(threshold=np.inf) # To print each elementsNEWLINEimport time # Package is for computing execution timeNEWLINEimport sys # Package to get command line argumentsNEWLINEimport tensorflow as tfNEWLINEfrom sklearn.model_selection import train_test_splitNEWLINEfrom array import arrayNEWLINENEWLINE# by setting env variables before Keras import you can set up which backendNEWLINEimport os,randomNEWLINE#os.environ["KERAS_BACKEND"] = "theano"NEWLINEos.environ["KERAS_BACKEND"] = "tensorflow"NEWLINEos.environ["THEANO_FLAGS"] = "device=cuda0, dnn.enabled=False"NEWLINEimport theanoNEWLINE#theano.config.mode = ""NEWLINENEWLINENEWLINENEWLINENEWLINEimport theano as thNEWLINEimport theano.tensor as TNEWLINEfrom keras.utils import np_utilsNEWLINEimport keras.models as modelsNEWLINEfrom keras.models import SequentialNEWLINEfrom keras.layers.core import Reshape,Dense,Dropout,Activation,FlattenNEWLINEfrom keras.layers import EmbeddingNEWLINEfrom keras.layers.noise import GaussianNoiseNEWLINEfrom keras.layers.convolutional import Conv2D, Conv1D, Convolution2D, MaxPooling2D, ZeroPadding2D, Convolution1DNEWLINEfrom keras.regularizers import *NEWLINEfrom keras.optimizers import adam, Nadam, AdadeltaNEWLINEfrom keras.optimizers import Adam, RMSprop, AdagradNEWLINEfrom keras.layers.convolutional_recurrent import ConvLSTM2DNEWLINEfrom keras.optimizers import rmspropNEWLINEfrom keras.callbacks import ReduceLROnPlateau, ModelCheckpointNEWLINE#from keras.regularizers import l2, activity_l2NEWLINEfrom sklearn import preprocessingNEWLINEfrom sklearn.preprocessing import StandardScalerNEWLINEfrom keras.layers.advanced_activations import LeakyReLU, PReLUNEWLINE# import BatchNormalizationNEWLINEfrom keras.layers.normalization import BatchNormalizationNEWLINEfrom keras.layers import GRU, RNN, SimpleRNN, LSTM, GRUCell, SimpleRNNCell, LSTMCellNEWLINENEWLINEfrom sklearn.metrics import classification_reportNEWLINEfrom sklearn.metrics import confusion_matrixNEWLINENEWLINEfrom keras.utils.np_utils import to_categoricalNEWLINEfrom keras.optimizers import SGDNEWLINENEWLINEimport matplotlibNEWLINE#matplotlib.use('TkAgg')NEWLINEmatplotlib.use('Agg')NEWLINEimport matplotlib.pyplot as pltNEWLINE#import seaborn as snsNEWLINEimport kerasNEWLINEimport itertoolsNEWLINEimport scipyNEWLINENEWLINEfrom keras.models import load_modelNEWLINENEWLINE########## FUNCTIONS TO CALCULATE F SCORE OF THE MODEL ###############NEWLINEfrom keras import backend as KNEWLINEdef recall_m(y_true, y_pred):NEWLINE true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))NEWLINE possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))NEWLINE recall = true_positives / (possible_positives + K.epsilon())NEWLINE return recallNEWLINENEWLINENEWLINEdef precision_m(y_true, y_pred):NEWLINE true_positives = 
################# THE WEIGHT MATRIX #################
# EACH ROW IS THE UNIT VECTOR (cos(k*pi/8), sin(k*pi/8)) FOR k = 1..8
W = np.matrix([[np.cos(1*(np.pi/8)), np.sin(1*(np.pi/8))],
               [np.cos(2*(np.pi/8)), np.sin(2*(np.pi/8))],
               [np.cos(3*(np.pi/8)), np.sin(3*(np.pi/8))],
               [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],
               [np.cos(5*(np.pi/8)), np.sin(5*(np.pi/8))],
               [np.cos(6*(np.pi/8)), np.sin(6*(np.pi/8))],
               [np.cos(7*(np.pi/8)), np.sin(7*(np.pi/8))],
               [np.cos(8*(np.pi/8)), np.sin(8*(np.pi/8))]])

# ALTERNATIVE WEIGHT MATRIX (KEPT FOR EXPERIMENTS):
# W = np.matrix([[np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],
#                [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],
#                [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],
#                [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],
#                [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],
#                [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],
#                [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],
#                [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))]])

print(W)

# VARIABLES
dtype_all = np.dtype([('raw-iq', np.complex64)])  # RAW INTERLEAVED I/Q SAMPLES

sample_size = 1024    # CHANGE AND EXPERIMENT - 512
no_of_samples = 4000  # CHANGE AND EXPERIMENT - 4000
no_of_features = 8    # CHANGE AND EXPERIMENT
number_of_data_to_read = sample_size * no_of_samples
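# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH (hypothetical toy data, not read from the .dat files):
# multiplying an (N, 2) array of (I, Q) pairs by W.T projects every complex
# sample onto 8 directions, turning the 2 raw features into no_of_features = 8.
_toy_iq = np.array([[1.0, 0.0],   # A PURELY IN-PHASE SAMPLE
                    [0.0, 1.0]])  # A PURELY QUADRATURE SAMPLE
_toy_proj = np.matmul(_toy_iq, np.transpose(W))
print("TOY PROJECTION SHAPE:", _toy_proj.shape)  # (2, 8): ROW 0 HOLDS THE COSINES, ROW 1 THE SINES
# ---------------------------------------------------------------------------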
#######################################################################################

#############################################################################################################################
########                                                                                                              #######
########                                        READING THE 5FT DATA                                                  #######
########                                                                                                              #######
#############################################################################################################################

data_file_loc1 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/0_5ft_06_16_2020_914MHz_indoor.dat'     # TRANSMITTER DIRECTLY POINTING TO THE RECEIVER
data_file_loc2 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+30_5ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVER
data_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+60_5ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVER
data_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+90_5ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVER
data_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+120_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVER
data_file_loc6 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+150_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVER
data_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/180_5ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVER
data_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-150_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVER
data_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-120_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVER
data_file_loc10 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-90_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVER
data_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-60_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVER
data_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-30_5ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVER

data_files_5ft = [data_file_loc1, data_file_loc2, data_file_loc3, data_file_loc4,
                  data_file_loc5, data_file_loc6, data_file_loc7, data_file_loc8,
                  data_file_loc9, data_file_loc10, data_file_loc11, data_file_loc12]

# ONE RAW I/Q ARRAY PER POSE
iqdata_per_pose = [np.fromfile(f, dtype=dtype_all, count=number_of_data_to_read) for f in data_files_5ft]

# PREPARING THE DATA WITHOUT TIME INFORMATION
no_of_data_per_pose = [iq.shape[0] for iq in iqdata_per_pose]
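# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH: the .dat files are assumed to be raw interleaved
# complex64 captures (e.g. what a GNU Radio file sink writes); np.fromfile
# with the structured dtype above recovers them. Round-trip on a hypothetical
# temporary file:
import tempfile
_tmp = tempfile.NamedTemporaryFile(suffix='.dat', delete=False)
_tmp.close()
np.array([1 + 2j, 3 - 4j], dtype=np.complex64).tofile(_tmp.name)
_back = np.fromfile(_tmp.name, dtype=dtype_all, count=2)
print("ROUND-TRIP I/Q:", _back['raw-iq'])  # [1.+2.j 3.-4.j]
# ---------------------------------------------------------------------------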
################################################################################################################
# CONCATENATING THE I AND Q VALUES COLUMN-WISE FOR EACH (I, Q) SAMPLE -- AXIS=1 PLACES I AND Q SIDE BY SIDE AS TWO COLUMNS
# SIMULTANEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTION

xdata_per_pose = []
for iq in iqdata_per_pose:
    x = np.concatenate([iq['raw-iq'].real.reshape(number_of_data_to_read, 1),
                        iq['raw-iq'].imag.reshape(number_of_data_to_read, 1)], axis=1)
    xdata_per_pose.append(np.matmul(x, np.transpose(W)))  # (N, 2) x (2, 8) -> (N, 8)
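# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH: shape walk-through of the reshape below with toy sizes
# (pretend sample_size=4, no_of_samples=2, no_of_features=8):
# (no_of_samples*sample_size, 8) --T--> (8, ...) --reshape--> (no_of_samples, sample_size*8)
_toy = np.arange(2 * 4 * 8).reshape(2 * 4, 8)
_rows = _toy.T.reshape(_toy.shape[0] // 4, 4 * 8)
print("TOY RESHAPE:", _toy.shape, "->", _rows.shape)  # (8, 8) -> (2, 32)
# ---------------------------------------------------------------------------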
# RESHAPING THE XDATA: ONE ROW PER WINDOW, sample_size SAMPLES TIMES no_of_features PROJECTIONS
xdata_per_pose = [x.T.reshape(n // sample_size, sample_size * no_of_features)
                  for x, n in zip(xdata_per_pose, no_of_data_per_pose)]

# CONCATENATING THE DIFFERENT POSE DATA ROW-WISE (AXIS=0 STACKS THE 12 POSES ON TOP OF EACH OTHER)
xdata = np.concatenate(xdata_per_pose, axis=0)

# CREATING LABELS FOR THE DATASETS: POSE k GETS LABEL k (0..11)
ydata = np.concatenate([np.full(x.shape[0], pose, dtype=int) for pose, x in enumerate(xdata_per_pose)], axis=0)

# PREPROCESSING X AND Y DATA
xdata = xdata.astype(np.float64)
ydata = ydata.astype(np.int64).flatten()

# REMOVING THE NANS
xdata = np.nan_to_num(xdata)

############## RANDOMLY SHUFFLING THE DATA ###################
# FIRST CONCATENATE X AND Y - TO MAINTAIN THE XDATA AND YDATA MAPPING
xydata = np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)

np.random.shuffle(xydata)

print("Shape of XYDATA", xydata.shape)

# THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 IF YOU DO NOT USE THE MULTI-ANGULAR PROJECTION
xdata_5ft, ydata_5ft = xydata[:, 0:sample_size * no_of_features], xydata[:, sample_size * no_of_features]
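# ---------------------------------------------------------------------------
# SIDE NOTE (sketch): an equivalent way to keep X and y aligned while
# shuffling, without gluing the labels onto the feature matrix - draw one
# shared permutation of the row indices (same effect as the xydata route above):
_perm = np.random.permutation(xdata.shape[0])
# xdata_5ft_alt, ydata_5ft_alt = xdata[_perm], ydata[_perm]
# ---------------------------------------------------------------------------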
#############################################################################################################################
########                                                                                                              #######
########                                       READING THE 10FT DATA                                                  #######
########                                                                                                              #######
#############################################################################################################################

data_file_loc1 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/0_10ft_06_16_2020_914MHz_indoor.dat'     # TRANSMITTER DIRECTLY POINTING TO THE RECEIVER
data_file_loc2 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+30_10ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVER
data_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+60_10ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVER
data_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+90_10ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVER
data_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+120_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVER
data_file_loc6 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+150_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVER
data_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/180_10ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVER
data_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-150_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVER
data_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-120_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVER
data_file_loc10 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-90_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVER
data_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-60_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVER
data_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-30_10ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVER

data_files_10ft = [data_file_loc1, data_file_loc2, data_file_loc3, data_file_loc4,
                   data_file_loc5, data_file_loc6, data_file_loc7, data_file_loc8,
                   data_file_loc9, data_file_loc10, data_file_loc11, data_file_loc12]

# ONE RAW I/Q ARRAY PER POSE
iqdata_per_pose = [np.fromfile(f, dtype=dtype_all, count=number_of_data_to_read) for f in data_files_10ft]

# PREPARING THE DATA WITHOUT TIME INFORMATION
no_of_data_per_pose = [iq.shape[0] for iq in iqdata_per_pose]
################################################################################################################
# CONCATENATING THE I AND Q VALUES COLUMN-WISE FOR EACH (I, Q) SAMPLE -- AXIS=1 PLACES I AND Q SIDE BY SIDE AS TWO COLUMNS
# SIMULTANEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTION

xdata_per_pose = []
for iq in iqdata_per_pose:
    x = np.concatenate([iq['raw-iq'].real.reshape(number_of_data_to_read, 1),
                        iq['raw-iq'].imag.reshape(number_of_data_to_read, 1)], axis=1)
    xdata_per_pose.append(np.matmul(x, np.transpose(W)))  # (N, 2) x (2, 8) -> (N, 8)
# RESHAPING THE XDATA
xdata_per_pose = [x.T.reshape(n // sample_size, sample_size * no_of_features)
                  for x, n in zip(xdata_per_pose, no_of_data_per_pose)]

# CONCATENATING THE DIFFERENT POSE DATA ROW-WISE (AXIS=0)
xdata = np.concatenate(xdata_per_pose, axis=0)

# CREATING LABELS FOR THE DATASETS: POSE k GETS LABEL k (0..11)
ydata = np.concatenate([np.full(x.shape[0], pose, dtype=int) for pose, x in enumerate(xdata_per_pose)], axis=0)

# PREPROCESSING X AND Y DATA
xdata = xdata.astype(np.float64)
ydata = ydata.astype(np.int64).flatten()

# REMOVING THE NANS
xdata = np.nan_to_num(xdata)

############## RANDOMLY SHUFFLING THE DATA ###################
# FIRST CONCATENATE X AND Y - TO MAINTAIN THE XDATA AND YDATA MAPPING
xydata = np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)

np.random.shuffle(xydata)

print("Shape of XYDATA", xydata.shape)

# THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 IF YOU DO NOT USE THE MULTI-ANGULAR PROJECTION
xdata_10ft, ydata_10ft = xydata[:, 0:sample_size * no_of_features], xydata[:, sample_size * no_of_features]
#############################################################################################################################
########                                                                                                              #######
########                                       READING THE 15FT DATA                                                  #######
########                                                                                                              #######
#############################################################################################################################

data_file_loc1 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/0_15ft_06_16_2020_914MHz_indoor.dat'     # TRANSMITTER DIRECTLY POINTING TO THE RECEIVER
data_file_loc2 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+30_15ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVER
data_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+60_15ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVER
data_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+90_15ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVER
data_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+120_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVER
data_file_loc6 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+150_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVER
data_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/180_15ft_06_16_2020_914MHz_indoor.dat'   # TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVER
data_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-150_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVER
data_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-120_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVER
data_file_loc10 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-90_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVER
data_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-60_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVER
data_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-30_15ft_06_16_2020_914MHz_indoor.dat'  # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVER
data_files_15ft = [data_file_loc1, data_file_loc2, data_file_loc3, data_file_loc4,
                   data_file_loc5, data_file_loc6, data_file_loc7, data_file_loc8,
                   data_file_loc9, data_file_loc10, data_file_loc11, data_file_loc12]

# ONE RAW I/Q ARRAY PER POSE
iqdata_per_pose = [np.fromfile(f, dtype=dtype_all, count=number_of_data_to_read) for f in data_files_15ft]

# PREPARING THE DATA WITHOUT TIME INFORMATION
no_of_data_per_pose = [iq.shape[0] for iq in iqdata_per_pose]
################################################################################################################
# CONCATENATING THE I AND Q VALUES COLUMN-WISE FOR EACH (I, Q) SAMPLE -- AXIS=1 PLACES I AND Q SIDE BY SIDE AS TWO COLUMNS
# SIMULTANEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTION

xdata_per_pose = []
for iq in iqdata_per_pose:
    x = np.concatenate([iq['raw-iq'].real.reshape(number_of_data_to_read, 1),
                        iq['raw-iq'].imag.reshape(number_of_data_to_read, 1)], axis=1)
    xdata_per_pose.append(np.matmul(x, np.transpose(W)))  # (N, 2) x (2, 8) -> (N, 8)
# RESHAPING THE XDATA
xdata_per_pose = [x.T.reshape(n // sample_size, sample_size * no_of_features)
                  for x, n in zip(xdata_per_pose, no_of_data_per_pose)]

# CONCATENATING THE DIFFERENT POSE DATA ROW-WISE (AXIS=0)
xdata = np.concatenate(xdata_per_pose, axis=0)

# CREATING LABELS FOR THE DATASETS: POSE k GETS LABEL k (0..11)
ydata = np.concatenate([np.full(x.shape[0], pose, dtype=int) for pose, x in enumerate(xdata_per_pose)], axis=0)

# PREPROCESSING X AND Y DATA
xdata = xdata.astype(np.float64)
ydata = ydata.astype(np.int64).flatten()

# REMOVING THE NANS
xdata = np.nan_to_num(xdata)

############## RANDOMLY SHUFFLING THE DATA ###################
# FIRST CONCATENATE X AND Y - TO MAINTAIN THE XDATA AND YDATA MAPPING
xydata = np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)

np.random.shuffle(xydata)

print("Shape of XYDATA", xydata.shape)

# THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 IF YOU DO NOT USE THE MULTI-ANGULAR PROJECTION
xdata_15ft, ydata_15ft = xydata[:, 0:sample_size * no_of_features], xydata[:, sample_size * no_of_features]

#############################################################################################################################
########                                                                                                              #######
########                              HIERARCHICAL TRAINING - FIRST STEP                                              #######
########                        FIRST CLASSIFYING THE DATA BASED ON DISTANCES                                         #######
########                 PREDICTING DISTANCE BETWEEN THE RECEIVER AND TRANSMITTER                                     #######
########                                                                                                              #######
#############################################################################################################################

xdata_ranging = np.concatenate([xdata_5ft, xdata_10ft, xdata_15ft], axis=0)

# CREATING LABELS FOR THE DATASETS
ydata_range1 = np.full(xdata_5ft.shape[0], 0, dtype=int)
ydata_range2 = np.full(xdata_10ft.shape[0], 1, dtype=int)
ydata_range3 = np.full(xdata_15ft.shape[0], 2, dtype=int)

# CONCATENATING THE DIFFERENT RANGE LABELS ROW-WISE
ydata_ranging = np.concatenate([ydata_range1, ydata_range2, ydata_range3], axis=0)

#################### NORMALIZE THE X DATA #######################
standard = preprocessing.StandardScaler().fit(xdata_ranging)  # ZERO MEAN AND UNIT VARIANCE FOR EACH COLUMN
xdata_ranging = standard.transform(xdata_ranging)
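# ---------------------------------------------------------------------------
# SIDE NOTE (sketch): the scaler above is fitted on ALL of xdata_ranging
# BEFORE the train/test split below, so test-set statistics leak into the
# normalization. A leakage-free variant fits on the training fold only,
# illustrated here on a small hypothetical array:
_demo = np.random.randn(10, 3)
_fit_part, _held_out = _demo[:8], _demo[8:]
_scaler = preprocessing.StandardScaler().fit(_fit_part)  # STATISTICS FROM THE TRAINING FOLD ONLY
_ = _scaler.transform(_held_out)                         # HELD-OUT DATA SCALED WITH TRAINING STATISTICS
# ---------------------------------------------------------------------------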
############### SEPARATING TRAIN AND TEST DATA #######################
print("############## STARTING THE TRAINING TO PREDICT THE RANGE BETWEEN RECEIVER AND TRANSMITTER ##########################")

xtrain_ranging, xtest_ranging, ytrain_ranging, ytest_ranging = train_test_split(xdata_ranging, ydata_ranging, test_size=0.2, shuffle=True, random_state=42)  # RANDOM SHUFFLE, 80/20 TRAIN/TEST SPLIT
print("XTRAIN AND XTEST SHAPE:", xtrain_ranging.shape, xtest_ranging.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain_ranging.shape, ytest_ranging.shape)

# RESHAPING THE DATA FROM 2 DIMENSIONS TO 4 DIMENSIONS - NEEDED TO APPLY 2D CONVOLUTION
# reshape to be [samples][height][width][channels]
xtrain_ranging = xtrain_ranging.reshape((xtrain_ranging.shape[0], no_of_features, sample_size, 1)).astype('float32')
xtest_ranging = xtest_ranging.reshape((xtest_ranging.shape[0], no_of_features, sample_size, 1)).astype('float32')
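# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH (pure bookkeeping, no new data): the Conv2D input below
# is channels-last - one "image" of height no_of_features, width sample_size,
# and a single channel. Quick consistency check:
assert xtrain_ranging.shape[1:] == (no_of_features, sample_size, 1)
print("CONV2D INPUT SHAPE PER SAMPLE:", xtrain_ranging.shape[1:])
# ---------------------------------------------------------------------------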
num_classes = 3  # TOTAL NUMBER OF RANGES

# CONVERT LABELS TO CATEGORICAL ONE-HOT ENCODING
ytrain_ranging_one_hot = to_categorical(ytrain_ranging, num_classes=num_classes)
ytest_ranging_one_hot = to_categorical(ytest_ranging, num_classes=num_classes)

print("XTRAIN AND XTEST SHAPE:", xtrain_ranging.shape, xtest_ranging.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain_ranging_one_hot.shape, ytest_ranging_one_hot.shape)

############################################################
#                                                          #
########   Building a 2D Convolutional Neural Network  #####
#                                                          #
############################################################

dr = 0.6          # DROPOUT RATE (FRACTION, USED IN THE SECOND DROPOUT LAYER)
batch_size = 128  # MINI BATCH SIZE
nb_epoch = 100    # NUMBER OF EPOCHS (GIVE A HIGHER NUMBER TO GET BETTER ACCURACY)

classes = ["5ft", "10ft", "15ft"]  # CHANGE LABEL
in_shp = list(xtrain_ranging.shape[1:])  # INPUT DIMENSION
print(in_shp)
timesteps = 1
data_dim = xtrain_ranging.shape[1]

ytrain_ranging_one_hot = np.reshape(ytrain_ranging_one_hot, (ytrain_ranging_one_hot.shape[0], num_classes))  # USED IN TRAINING
ytest_ranging_one_hot = np.reshape(ytest_ranging_one_hot, (ytest_ranging_one_hot.shape[0], num_classes))  # USED IN TESTING

start_time = time.time()  # TAKING START TIME TO CALCULATE OVERALL EXECUTION TIME

# MODELING THE CNN
model_ranging = Sequential()

# FIRST CONVOLUTIONAL LAYER
model_ranging.add(Conv2D(128, (2, 2), input_shape=(no_of_features, sample_size, 1), activation='relu'))  # CHANGE # STRIDE (1, 1)
model_ranging.add(MaxPooling2D())  # POOL SIZE (2, 2) AND STRIDE (2, 2)
model_ranging.add(Dropout(0.2))

# SECOND CONVOLUTIONAL LAYER
model_ranging.add(Conv2D(64, (2, 2), activation='relu'))
model_ranging.add(MaxPooling2D())
model_ranging.add(Dropout(dr))

model_ranging.add(Flatten())

# FIRST DENSE LAYER
model_ranging.add(Dense(256, activation='relu'))

# SECOND DENSE LAYER
model_ranging.add(Dense(128, activation='relu'))

# OUTPUT LAYER
model_ranging.add(Dense(num_classes, activation='softmax'))

# COMPILE THE MODEL - MULTI-CLASS CLASSIFICATION
sgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)
adam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)

# model_ranging.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])  # ALTERNATIVE: RMSPROP
# model_ranging.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])  # ALTERNATIVE: SGD

model_ranging.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])
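# ---------------------------------------------------------------------------
# SIDE NOTE (sketch): compile() above receives the STRING 'adam', so Keras
# builds a fresh Adam optimizer with its default learning rate (0.001); the
# sgd and adam instances created a few lines up (lr=0.0001) are never used.
# To actually train with lr=0.0001, pass the instance instead:
# model_ranging.compile(optimizer=adam, loss='categorical_crossentropy',
#                       metrics=['acc', f1_m, precision_m, recall_m])
# ---------------------------------------------------------------------------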
model_ranging.summary()
filepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_data_ranging_2D_CNN_Mapping.wts.h5'
print("The dropout rate was: ")
print(dr)

# FIT THE MODEL
history = model_ranging.fit(xtrain_ranging, ytrain_ranging_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[
    keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),
    keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])

# SAVING THE MODEL FOR TRANSFER LEARNING
saved_file = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/2D_CNN_ranging_classifier.h5'
model_ranging.save(saved_file)

# EVALUATE THE MODEL
loss, accuracy, f1_score, precision, recall = model_ranging.evaluate(xtest_ranging, ytest_ranging_one_hot, batch_size=batch_size)  # CHANGE
print("\nTest Loss: %s: %.4f" % (model_ranging.metrics_names[0], loss))
print("\nTest Accuracy: %s: %.2f%%" % (model_ranging.metrics_names[1], accuracy * 100))
print("\nTest F1 Score: %s: %.2f" % (model_ranging.metrics_names[2], f1_score))
print("\nTest Precision: %s: %.2f%%" % (model_ranging.metrics_names[3], precision * 100))
print("\nTest Recall: %s: %.2f%%" % (model_ranging.metrics_names[4], recall * 100))

# CALCULATING TOTAL EXECUTION TIME
end_time = time.time()  # TAKING END TIME TO CALCULATE OVERALL EXECUTION TIME
print("\n Total Execution Time (Minutes): ")
print(((end_time - start_time) / 60))

#### SET PLOTTING PARAMETERS #########
params = {'legend.fontsize': 'xx-large',
          'axes.labelsize': 'xx-large',
          'axes.titlesize': 'xx-large',
          'xtick.labelsize': 'xx-large',
          'ytick.labelsize': 'xx-large'}
plt.rcParams.update(params)

# SHOW ACCURACY CURVES
fig = plt.figure()
plt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')
plt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')
plt.ylabel('Accuracy(%)')
plt.xlabel('Epoch')
plt.legend()
plt.tight_layout()
fig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_ranging_acc_2D_CNN_Mapping.png')  # SAVE THE FIGURE TO FILE
plt.close(fig)


def plot_confusion_matrix(cm, title='Confusion Matrix', cmap=plt.cm.YlGnBu, labels=[], normalize=False, filedest=''):
    # RENDER AN (OPTIONALLY ROW-NORMALIZED) CONFUSION MATRIX AND SAVE IT TO filedest
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    else:
        cm = cm.astype('int')
    plt.rcParams.update(params)
    fig = plt.figure(figsize=(12, 12))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.colorbar()
    tick_marks = np.arange(len(labels))
    plt.xticks(tick_marks, labels, rotation=45)
    plt.yticks(tick_marks, labels)
    thresh = cm.max() / 2
    fmt = '.2f' if normalize else 'd'
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center", fontsize="xx-large",
                 color="white" if cm[i, j] > thresh else "black")
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()
    fig.savefig(filedest)  # SAVE THE FIGURE TO FILE
    plt.close(fig)


# PLOT CONFUSION MATRIX
test_Y_hat = model_ranging.predict(xtest_ranging, batch_size=batch_size)
conf = np.zeros([len(classes), len(classes)])
confnorm = np.zeros([len(classes), len(classes)])
for i in range(0, xtest_ranging.shape[0]):
    j = list(ytest_ranging_one_hot[i, :]).index(1)
    k = int(np.argmax(test_Y_hat[i, :]))
    conf[j, k] = conf[j, k] + 1
plot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_ranging_conf_mat_2D_CNN_Mapping.png')
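# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH: the manual double loop above is equivalent to sklearn's
# confusion_matrix (already imported); shown here as a cross-check:
_true_lbl = np.argmax(ytest_ranging_one_hot, axis=1)
_pred_lbl = np.argmax(test_Y_hat, axis=1)
print("SKLEARN CONFUSION MATRIX:\n", confusion_matrix(_true_lbl, _pred_lbl))
# ---------------------------------------------------------------------------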
#############################################################################################################################
########                                                                                                              #######
########                             HIERARCHICAL TRAINING - SECOND STEP                                              #######
########                 CLASSIFYING THE DATA BASED ON POSES OF TRANSMITTER ANTENNA                                   #######
########                   PREDICTING RELATIVE POSES OF TRANSMITTER ANTENNA                                           #######
########                                     DISTANCE: 5FT                                                            #######
#############################################################################################################################

#################### NORMALIZE THE X DATA #######################
standard = preprocessing.StandardScaler().fit(xdata_5ft)  # ZERO MEAN AND UNIT VARIANCE FOR EACH COLUMN
xdata_5ft = standard.transform(xdata_5ft)

print("############## STARTING THE TRAINING TO PREDICT THE POSES OF TRANSMITTER ANTENNA WITH 5 FT DISTANCE FROM RECEIVER ##########################")

############### SEPARATING TRAIN AND TEST DATA #######################
xtrain_5ft, xtest_5ft, ytrain_5ft, ytest_5ft = train_test_split(xdata_5ft, ydata_5ft, test_size=0.2, shuffle=True, random_state=42)  # RANDOM SHUFFLE, 80/20 TRAIN/TEST SPLIT
print("XTRAIN AND XTEST SHAPE:", xtrain_5ft.shape, xtest_5ft.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain_5ft.shape, ytest_5ft.shape)

# RESHAPING THE DATA FROM 2 DIMENSIONS TO 4 DIMENSIONS - NEEDED TO APPLY 2D CONVOLUTION
# reshape to be [samples][height][width][channels]
xtrain_5ft = xtrain_5ft.reshape((xtrain_5ft.shape[0], no_of_features, sample_size, 1)).astype('float32')
xtest_5ft = xtest_5ft.reshape((xtest_5ft.shape[0], no_of_features, sample_size, 1)).astype('float32')

num_classes = 12  # TOTAL NUMBER OF POSES

# CONVERT LABELS TO CATEGORICAL ONE-HOT ENCODING
ytrain_5ft_one_hot = to_categorical(ytrain_5ft, num_classes=num_classes)
ytest_5ft_one_hot = to_categorical(ytest_5ft, num_classes=num_classes)

print("XTRAIN AND XTEST SHAPE:", xtrain_5ft.shape, xtest_5ft.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain_5ft_one_hot.shape, ytest_5ft_one_hot.shape)

############################################################
#                                                          #
########   Building a 2D Convolutional Neural Network  #####
#                                                          #
############################################################

dr = 0.6          # DROPOUT RATE (FRACTION)
batch_size = 128  # MINI BATCH SIZE
nb_epoch = 100    # NUMBER OF EPOCHS (GIVE A HIGHER NUMBER TO GET BETTER ACCURACY)

classes = ["0", "+30", "+60", "+90", "+120", "+150", "180", "-150", "-120", "-90", "-60", "-30"]  # CHANGE LABEL
in_shp = list(xtrain_5ft.shape[1:])  # INPUT DIMENSION
print(in_shp)
timesteps = 1
data_dim = xtrain_5ft.shape[1]

ytrain_5ft_one_hot = np.reshape(ytrain_5ft_one_hot, (ytrain_5ft_one_hot.shape[0], num_classes))  # USED IN TRAINING
ytest_5ft_one_hot = np.reshape(ytest_5ft_one_hot, (ytest_5ft_one_hot.shape[0], num_classes))  # USED IN TESTING

start_time = time.time()  # TAKING START TIME TO CALCULATE OVERALL EXECUTION TIME

# IMPLEMENTING THE TRANSFER LEARNING - LOADING THE PREVIOUSLY SAVED RANGING MODEL
source_model = load_model(saved_file, custom_objects={
    "f1_m": f1_m,
    "precision_m": precision_m,
    "recall_m": recall_m
})

model_pose = Sequential()
for layer in source_model.layers[:-1]:  # COPY EVERY LAYER EXCEPT THE OLD OUTPUT LAYER
    model_pose.add(layer)

# ADDING OUTPUT LAYER
model_pose.add(Dense(num_classes, activation='softmax'))
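# ---------------------------------------------------------------------------
# SIDE NOTE (sketch): the copied layers stay trainable here, so the whole
# network is fine-tuned on the pose labels. A common transfer-learning variant
# (an option, not what this script does) freezes the transferred feature
# extractor and trains only the new softmax head:
# for layer in model_pose.layers[:-1]:
#     layer.trainable = False  # MUST BE SET BEFORE compile()
# ---------------------------------------------------------------------------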
# COMPILE THE MODEL - MULTI-CLASS CLASSIFICATION
sgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)
adam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)

model_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])

model_pose.summary()
filepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'
print("The dropout rate was: ")
print(dr)

# FIT THE MODEL
history = model_pose.fit(xtrain_5ft, ytrain_5ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[
    keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),
    keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])

# EVALUATE THE MODEL
loss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_5ft, ytest_5ft_one_hot, batch_size=batch_size)  # CHANGE
print("\nTest Loss: %s: %.4f" % (model_pose.metrics_names[0], loss))
print("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100))
print("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score))
print("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100))
print("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100))

# CALCULATING TOTAL EXECUTION TIME
end_time = time.time()  # TAKING END TIME TO CALCULATE OVERALL EXECUTION TIME
print("\n Total Execution Time (Minutes): ")
print(((end_time - start_time) / 60))

#### SET PLOTTING PARAMETERS #########
params = {'legend.fontsize': 'xx-large',
          'axes.labelsize': 'xx-large',
          'axes.titlesize': 'xx-large',
          'xtick.labelsize': 'xx-large',
          'ytick.labelsize': 'xx-large'}
plt.rcParams.update(params)

# SHOW ACCURACY CURVES
fig = plt.figure()
plt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')
plt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')
plt.ylabel('Accuracy(%)')
plt.xlabel('Epoch')
plt.legend()
plt.tight_layout()
fig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_12_poses_acc_2D_CNN_Mapping.png')  # SAVE THE FIGURE TO FILE
plt.close(fig)
# PLOT CONFUSION MATRIX
test_Y_hat = model_pose.predict(xtest_5ft, batch_size=batch_size)
conf = np.zeros([len(classes), len(classes)])
confnorm = np.zeros([len(classes), len(classes)])
for i in range(0, xtest_5ft.shape[0]):
    j = list(ytest_5ft_one_hot[i, :]).index(1)
    k = int(np.argmax(test_Y_hat[i, :]))
    conf[j, k] = conf[j, k] + 1
plot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')
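# ---------------------------------------------------------------------------
# ILLUSTRATIVE SKETCH: per-pose accuracy falls out of the confusion matrix
# diagonal (row-normalized); a compact readout to complement the saved figure:
_row_sums = conf.sum(axis=1)
_per_pose_acc = np.divide(conf.diagonal(), _row_sums, out=np.zeros_like(_row_sums), where=_row_sums > 0)
print("PER-POSE ACCURACY (5FT):", np.round(_per_pose_acc, 3))
# ---------------------------------------------------------------------------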
RESHAPE")NEWLINEytrain_10ft_one_hot = np.reshape(ytrain_10ft_one_hot, (ytrain_10ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_10ft_one_hot = np.reshape(ytest_10ft_one_hot, (ytest_10ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution timeNEWLINENEWLINE# IMPLEMENTING THE TRANSFER LEARNINGNEWLINE#source_model = load_model(saved_file)NEWLINE# loading the previously saved modelNEWLINEsource_model = load_model(saved_file, custom_objects={NEWLINE "f1_m": f1_m,NEWLINE "precision_m": precision_m,NEWLINE "recall_m": recall_mNEWLINE })NEWLINENEWLINEmodel_pose = Sequential()NEWLINEfor layer in source_model.layers[:-1]: # go through until last layerNEWLINE model_pose.add(layer)NEWLINENEWLINENEWLINE# ADDING OUTPUT LAYERNEWLINEmodel_pose.add(Dense(num_classes, activation='softmax'))NEWLINENEWLINE# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE#model.compile(optimizer='sgd', loss='categorical_crossentropy',metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with rms adam optimizer # CHANGENEWLINENEWLINEmodel_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])NEWLINENEWLINEmodel_pose.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINEhistory = model_pose.fit(xtrain_10ft, ytrain_10ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_10ft, ytest_10ft_one_hot, batch_size=batch_size) # CHANGENEWLINEprint("\nTest Loss: %s: %.2f%%" % (model_pose.metrics_names[0], loss * 100)) # CHANGENEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100)) # CHANGENEWLINENEWLINE# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE 'axes.labelsize': 'xx-large',NEWLINE 'axes.titlesize': 
'xx-large',NEWLINE 'xtick.labelsize': 'xx-large',NEWLINE 'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy(%)')NEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_12_poses_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)NEWLINENEWLINENEWLINE# plt.show()NEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_pose.predict(xtest_10ft, batch_size=batch_size)NEWLINEconf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_10ft.shape[0]):NEWLINE j = list(ytest_10ft_one_hot[i, :]).index(1)NEWLINE k = int(np.argmax(test_Y_hat[i, :]))NEWLINE conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest= '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')NEWLINENEWLINENEWLINE#############################################################################################################################NEWLINE########                                                                                                              #######NEWLINE########                                 HIERARCHICAL TRAINING- SECOND STEP                                           #######NEWLINE########                    CLASSIFYING THE DATA BASED ON POSES OF TRANSMITTER ANTENNA                                #######NEWLINE########                      PREDICTING RELATIVE POSES OF TRANSMITTER ANTENNA                                        #######NEWLINE########                                      DISTANCE: 15FT                                                          #######NEWLINE#############################################################################################################################NEWLINENEWLINENEWLINE#################### NORMALIZE THE X DATA #######################NEWLINENEWLINENEWLINEstandard = preprocessing.StandardScaler().fit(xdata_15ft) # Normalize the data with zero mean and unit variance for each columnNEWLINExdata_15ft = standard.transform(xdata_15ft)NEWLINENEWLINEprint("############## STARTING THE TRAINING TO PREDICT THE POSES OF TRANSMITTER ANTENNA WITH 15 FT DISTANCE FROM RECEIVER ##########################")NEWLINENEWLINENEWLINE############### SEPARATING TRAIN AND TEST DATA #######################NEWLINENEWLINExtrain_15ft, xtest_15ft, ytrain_15ft, ytest_15ft = train_test_split(xdata_15ft, ydata_15ft, test_size=0.2, shuffle = True, random_state=42) # Randomly shuffling and 80/20 is train/test sizeNEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_15ft.shape, xtest_15ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_15ft.shape, ytest_15ft.shape)NEWLINENEWLINE# RESHAPING THE DATA FROM 2 DIMENSIONAL TO 4 DIMENSIONAL SHAPE - NEEDED TO USE 2D CONVOLUTIONNEWLINE# reshape to be [samples][width][height][channels]NEWLINExtrain_15ft = xtrain_15ft.reshape((xtrain_15ft.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINExtest_15ft = xtest_15ft.reshape((xtest_15ft.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINENEWLINENEWLINEnum_classes = 12 # TOTAL NUMBER OF POSESNEWLINENEWLINENEWLINENEWLINE# Convert labels to categorical one-hot encodingNEWLINEytrain_15ft_one_hot = to_categorical(ytrain_15ft, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABELNEWLINEytest_15ft_one_hot = to_categorical(ytest_15ft, num_classes=num_classes)NEWLINENEWLINENEWLINEprint("XTRAIN AND
XTEST SHAPE:", xtrain_15ft.shape, xtest_15ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_15ft_one_hot.shape, ytest_15ft_one_hot.shape)NEWLINENEWLINE############################################################NEWLINE# #NEWLINE######## Building a 2D Convolutional Neural Network #####NEWLINE# #NEWLINE############################################################NEWLINENEWLINEdr = 0.6 # dropout rate (%)NEWLINEbatch_size = 128 # Mini batch sizeNEWLINEnb_epoch = 100 # Number of Epoch (Give a higher number to get better accuracy)NEWLINENEWLINEclasses = ["0", "+30", "+60", "+90", "+120", "+150", "180", "-150", "-120", "-90", "-60", "-30"] # CHANGE LABELNEWLINEin_shp = list(xtrain_15ft.shape[1:]) # Input DimensionNEWLINEprint(in_shp)NEWLINE# model = models.Sequential()NEWLINEtimesteps=1NEWLINEdata_dim=xtrain_15ft.shape[1]NEWLINENEWLINENEWLINENEWLINE# print ("AFTER RESHAPE")NEWLINEytrain_15ft_one_hot = np.reshape(ytrain_15ft_one_hot, (ytrain_15ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_15ft_one_hot = np.reshape(ytest_15ft_one_hot, (ytest_15ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution timeNEWLINENEWLINE# IMPLEMENTING THE TRANSFER LEARNINGNEWLINE#source_model = load_model(saved_file)NEWLINE# loading the previously saved modelNEWLINEsource_model = load_model(saved_file, custom_objects={NEWLINE "f1_m": f1_m,NEWLINE "precision_m": precision_m,NEWLINE "recall_m": recall_mNEWLINE })NEWLINENEWLINEmodel_pose = Sequential()NEWLINEfor layer in source_model.layers[:-1]: # go through until last layerNEWLINE model_pose.add(layer)NEWLINENEWLINENEWLINE# ADDING OUTPUT LAYERNEWLINEmodel_pose.add(Dense(num_classes, activation='softmax'))NEWLINENEWLINE# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE#model.compile(optimizer='sgd', loss='categorical_crossentropy',metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with rms adam optimizer # CHANGENEWLINENEWLINEmodel_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])NEWLINENEWLINEmodel_pose.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINEhistory = model_pose.fit(xtrain_15ft, ytrain_15ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_15ft, ytest_15ft_one_hot, batch_size=batch_size) # 
CHANGENEWLINEprint("\nTest Loss: %s: %.2f%%" % (model_pose.metrics_names[0], loss * 100)) # CHANGENEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100)) # CHANGENEWLINENEWLINE# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE 'axes.labelsize': 'xx-large',NEWLINE 'axes.titlesize': 'xx-large',NEWLINE 'xtick.labelsize': 'xx-large',NEWLINE 'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy(%)')NEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_12_poses_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)NEWLINENEWLINENEWLINE# plt.show()NEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_pose.predict(xtest_15ft, batch_size=batch_size)NEWLINEconf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_15ft.shape[0]):NEWLINE j = list(ytest_15ft_one_hot[i, :]).index(1)NEWLINE k = int(np.argmax(test_Y_hat[i, :]))NEWLINE conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest= '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')NEWLINE |
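# --- Hedged sketch: the manual confusion-matrix loop above, via scikit-learn ---
# The script fills `conf` by looping over one-hot rows; sklearn.metrics.confusion_matrix
# produces the same integer matrix in one call. Dummy arrays stand in for the real
# ytest_*_one_hot / test_Y_hat; the sizes are illustrative only.
import numpy as np
from sklearn.metrics import confusion_matrix

n_cls = 12
y_true_1h = np.eye(n_cls)[np.random.randint(0, n_cls, 100)]   # fake one-hot labels
y_pred_prob = np.random.rand(100, n_cls)                      # fake softmax outputs

conf_sk = confusion_matrix(y_true_1h.argmax(axis=1),
                           y_pred_prob.argmax(axis=1),
                           labels=list(range(n_cls)))
assert conf_sk.shape == (n_cls, n_cls) and conf_sk.sum() == 100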
# TRANSMITTER HAS A DIRECTIONAL ANTENNA - POINTED IN 12 DIFFERENT POSESNEWLINE# RECEIVER HAS AN OMNI DIRECTIONAL ANTENNANEWLINE# DISTANCE BETWEEN RECEIVER AND TRANSMITTER - (5, 10, 15) FEETNEWLINE# IMPLEMENTING HIERARCHICAL MACHINE LEARNINGNEWLINE# IMPLEMENTING TRANSFER LEARNINGNEWLINE# DATA COLLECTED IN INDOOR ENVIRONMENTNEWLINENEWLINE#############################################################NEWLINE#      Pose Estimation and Ranging the RF Transmitter       #NEWLINE#      Neural Network for Direction Finding Data 2020       #NEWLINE#                  Author: Debashri Roy                     #NEWLINE#############################################################NEWLINENEWLINE############ IMPORTING NECESSARY PACKAGES ################NEWLINEimport numpy as np # Package for numerical computationNEWLINEnp.set_printoptions(threshold=np.inf) # To print every elementNEWLINEimport time # Package for computing execution timeNEWLINEimport sys # Package to get command line argumentsNEWLINEimport tensorflow as tfNEWLINEfrom sklearn.model_selection import train_test_splitNEWLINEfrom array import arrayNEWLINENEWLINE# By setting env variables before the Keras import you can choose which backend to useNEWLINEimport os,randomNEWLINE#os.environ["KERAS_BACKEND"] = "theano"NEWLINEos.environ["KERAS_BACKEND"] = "tensorflow"NEWLINEos.environ["THEANO_FLAGS"] = "device=cuda0, dnn.enabled=False"NEWLINEimport theanoNEWLINE#theano.config.mode = ""NEWLINENEWLINENEWLINENEWLINENEWLINEimport theano as thNEWLINEimport theano.tensor as TNEWLINEfrom keras.utils import np_utilsNEWLINEimport keras.models as modelsNEWLINEfrom keras.models import SequentialNEWLINEfrom keras.layers.core import Reshape,Dense,Dropout,Activation,FlattenNEWLINEfrom keras.layers import EmbeddingNEWLINEfrom keras.layers.noise import GaussianNoiseNEWLINEfrom keras.layers.convolutional import Conv2D, Conv1D, Convolution2D, MaxPooling2D, ZeroPadding2D, Convolution1DNEWLINEfrom keras.regularizers import *NEWLINEfrom keras.optimizers import adam, Nadam, AdadeltaNEWLINEfrom keras.optimizers import Adam, RMSprop, AdagradNEWLINEfrom keras.layers.convolutional_recurrent import ConvLSTM2DNEWLINEfrom keras.optimizers import rmspropNEWLINEfrom keras.callbacks import ReduceLROnPlateau, ModelCheckpointNEWLINE#from keras.regularizers import l2, activity_l2NEWLINEfrom sklearn import preprocessingNEWLINEfrom sklearn.preprocessing import StandardScalerNEWLINEfrom keras.layers.advanced_activations import LeakyReLU, PReLUNEWLINE# import BatchNormalizationNEWLINEfrom keras.layers.normalization import BatchNormalizationNEWLINEfrom keras.layers import GRU, RNN, SimpleRNN, LSTM, GRUCell, SimpleRNNCell, LSTMCellNEWLINENEWLINEfrom sklearn.metrics import classification_reportNEWLINEfrom sklearn.metrics import confusion_matrixNEWLINENEWLINEfrom keras.utils.np_utils import to_categoricalNEWLINEfrom keras.optimizers import SGDNEWLINENEWLINEimport matplotlibNEWLINE#matplotlib.use('TkAgg')NEWLINEmatplotlib.use('Agg')NEWLINEimport matplotlib.pyplot as pltNEWLINE#import seaborn as snsNEWLINEimport kerasNEWLINEimport itertoolsNEWLINEimport scipyNEWLINENEWLINEfrom keras.models import load_modelNEWLINENEWLINE########## FUNCTIONS TO CALCULATE F SCORE OF THE MODEL ###############NEWLINEfrom keras import backend as KNEWLINEdef recall_m(y_true, y_pred):NEWLINE true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))NEWLINE possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))NEWLINE recall = true_positives / (possible_positives + K.epsilon())NEWLINE return recallNEWLINENEWLINENEWLINEdef precision_m(y_true, y_pred):NEWLINE true_positives =
K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))NEWLINE predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))NEWLINE precision = true_positives / (predicted_positives + K.epsilon())NEWLINE return precisionNEWLINENEWLINENEWLINEdef f1_m(y_true, y_pred):NEWLINE precision = precision_m(y_true, y_pred)NEWLINE recall = recall_m(y_true, y_pred)NEWLINE return 2 * ((precision * recall) / (precision + recall + K.epsilon()))NEWLINE######################################################################NEWLINENEWLINENEWLINE################# THE WEIGHT MATRIX #################3NEWLINEW = np.matrix([[np.cos(1*(np.pi/8)), np.sin(1*(np.pi/8))],NEWLINE[np.cos(2*(np.pi/8)), np.sin(2*(np.pi/8))],NEWLINE[np.cos(3*(np.pi/8)), np.sin(3*(np.pi/8))],NEWLINE[np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],NEWLINE[np.cos(5*(np.pi/8)), np.sin(5*(np.pi/8))],NEWLINE[np.cos(6*(np.pi/8)), np.sin(6*(np.pi/8))],NEWLINE[np.cos(7*(np.pi/8)), np.sin(7*(np.pi/8))],NEWLINE[np.cos(8*(np.pi/8)), np.sin(8*(np.pi/8))]]NEWLINE)NEWLINENEWLINE# W = np.matrix([[np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],NEWLINE# [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],NEWLINE# [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],NEWLINE# [np.cos(4*(np.pi/8)), np.sin(4*(np.pi/8))],NEWLINE# [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],NEWLINE# [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],NEWLINE# [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))],NEWLINE# [np.cos(0*(np.pi/8)), np.sin(0*(np.pi/8))]]NEWLINE# )NEWLINENEWLINEprint(W)NEWLINENEWLINE# variablesNEWLINEdtype_all= scipy.dtype([('raw-iq', scipy.complex64)])NEWLINENEWLINENEWLINEsample_size = 1024 # CHANGE AND EXPERIMENT -512NEWLINEno_of_samples = 4000 # CHANGE AND EXPERIMENT - 4000NEWLINEno_of_features= 8 # CHANGE AND EXPERIMENTNEWLINEnumber_of_data_to_read = sample_size * no_of_samplesNEWLINENEWLINE#######################################################################################NEWLINENEWLINE#############################################################################################################################NEWLINE######## #######NEWLINE######## READING THE 5FT DATA #######NEWLINE######## #######NEWLINE#############################################################################################################################NEWLINENEWLINEdata_file_loc1 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/0_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER DIRECTLY POINTING TO THE RECEIVERNEWLINEdata_file_loc2 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+30_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+60_5ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+90_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVERNEWLINENEWLINEdata_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+120_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc6 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/+150_5ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/180_5ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER 
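# --- Hedged sketch: what the multi-angular projection with W does to one block ---
# Each raw sample is an (I, Q) pair, i.e. a length-2 vector. Multiplying an (N, 2)
# block of samples by W^T (2 x 8) projects every pair onto the 8 unit vectors at
# angles k*pi/8, k = 1..8, yielding 8 "feature" columns per sample. Tiny synthetic
# example (4 fake samples) for illustration only; W_demo mirrors W's layout.
import numpy as np

angles = np.arange(1, 9) * np.pi / 8
W_demo = np.stack([np.cos(angles), np.sin(angles)], axis=1)  # shape (8, 2), like W
iq = np.random.randn(4, 2)                                   # 4 fake (I, Q) samples
proj = iq @ W_demo.T                                         # shape (4, 8)
assert proj.shape == (4, 8)
# Column k holds I*cos(k*pi/8) + Q*sin(k*pi/8): the signal seen along that angle.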
ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVERNEWLINEdata_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-150_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINEdata_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-120_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc10 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-90_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-60_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/5ft/-30_5ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINENEWLINENEWLINEiqdata_loc1 = scipy.fromfile(open(data_file_loc1), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc2 = scipy.fromfile(open(data_file_loc2), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc3 = scipy.fromfile(open(data_file_loc3), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc4 = scipy.fromfile(open(data_file_loc4), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINEiqdata_loc5 = scipy.fromfile(open(data_file_loc5), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc6 = scipy.fromfile(open(data_file_loc6), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc7 = scipy.fromfile(open(data_file_loc7), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc8 = scipy.fromfile(open(data_file_loc8), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINEiqdata_loc9 = scipy.fromfile(open(data_file_loc9), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc10 = scipy.fromfile(open(data_file_loc10), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc11 = scipy.fromfile(open(data_file_loc11), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc12 = scipy.fromfile(open(data_file_loc12), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINENEWLINE# PREPARING THE DATA WITHOUT TIME INFORMATIONNEWLINEno_of_data_loc1 = iqdata_loc1.shape[0]NEWLINEno_of_data_loc2 = iqdata_loc2.shape[0]NEWLINEno_of_data_loc3 = iqdata_loc3.shape[0]NEWLINEno_of_data_loc4 = iqdata_loc4.shape[0]NEWLINENEWLINEno_of_data_loc5 = iqdata_loc5.shape[0]NEWLINEno_of_data_loc6 = iqdata_loc6.shape[0]NEWLINEno_of_data_loc7 = iqdata_loc7.shape[0]NEWLINEno_of_data_loc8 = iqdata_loc8.shape[0]NEWLINENEWLINEno_of_data_loc9 = iqdata_loc9.shape[0]NEWLINEno_of_data_loc10 = iqdata_loc10.shape[0]NEWLINEno_of_data_loc11 = iqdata_loc11.shape[0]NEWLINEno_of_data_loc12 = iqdata_loc12.shape[0]NEWLINENEWLINENEWLINENEWLINE################################################################################################################NEWLINE# CONCATINATING THE I AND Q VALUES VERTICALLY OF (I, Q) SAMPLE. 
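# --- Hedged sketch: reading the interleaved I/Q captures with NumPy directly ---
# scipy.fromfile used above is effectively numpy.fromfile re-exported through
# SciPy's old top-level namespace (newer SciPy versions deprecate or drop it),
# so the same reads can be written with np.fromfile. The path below is a
# placeholder for illustration, not one of the real capture files.
import numpy as np

def read_iq(path, count):
    """Read `count` complex64 I/Q samples from a raw GNU Radio-style .dat file."""
    return np.fromfile(path, dtype=np.complex64, count=count)

# Example usage (placeholder path):
# iq = read_iq('/tmp/example_914MHz.dat', 1024 * 4000)
# iq.real and iq.imag then give the I and Q streams used below.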
-- more precisely, axis=1 places the I and Q values side by side as two columns of each sample (column-wise stacking)NEWLINE# SIMULTANEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTIONNEWLINENEWLINExdata_loc1= np.concatenate([iqdata_loc1['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc1['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc1 = np.matmul(xdata_loc1, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc2= np.concatenate([iqdata_loc2['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc2['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc2 = np.matmul(xdata_loc2, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc3= np.concatenate([iqdata_loc3['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc3['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc3 = np.matmul(xdata_loc3, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc4= np.concatenate([iqdata_loc4['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc4['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc4 = np.matmul(xdata_loc4, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc5= np.concatenate([iqdata_loc5['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc5['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc5 = np.matmul(xdata_loc5, np.transpose(W))NEWLINENEWLINExdata_loc6= np.concatenate([iqdata_loc6['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc6['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc6 = np.matmul(xdata_loc6, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc7= np.concatenate([iqdata_loc7['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc7['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc7 = np.matmul(xdata_loc7, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc8= np.concatenate([iqdata_loc8['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc8['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc8 = np.matmul(xdata_loc8, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc9= np.concatenate([iqdata_loc9['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc9['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc9 = np.matmul(xdata_loc9, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc10= np.concatenate([iqdata_loc10['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc10['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc10 = np.matmul(xdata_loc10, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc11= np.concatenate([iqdata_loc11['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc11['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc11 = np.matmul(xdata_loc11, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc12= np.concatenate([iqdata_loc12['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc12['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc12 = np.matmul(xdata_loc12, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINE# RESHAPING THE XDATANEWLINExdata_loc1= xdata_loc1.T.reshape(no_of_data_loc1//(sample_size), sample_size*no_of_features)NEWLINExdata_loc2 = xdata_loc2.T.reshape(no_of_data_loc2//(sample_size), sample_size*no_of_features)NEWLINExdata_loc3 = xdata_loc3.T.reshape(no_of_data_loc3//(sample_size), sample_size*no_of_features)NEWLINExdata_loc4 = xdata_loc4.T.reshape(no_of_data_loc4//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc5= xdata_loc5.T.reshape(no_of_data_loc5//(sample_size), sample_size*no_of_features)NEWLINExdata_loc6 = xdata_loc6.T.reshape(no_of_data_loc6//(sample_size), sample_size*no_of_features)NEWLINExdata_loc7 = xdata_loc7.T.reshape(no_of_data_loc7//(sample_size), sample_size*no_of_features)NEWLINExdata_loc8 = xdata_loc8.T.reshape(no_of_data_loc8//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc9= xdata_loc9.T.reshape(no_of_data_loc9//(sample_size), sample_size*no_of_features)NEWLINExdata_loc10 = xdata_loc10.T.reshape(no_of_data_loc10//(sample_size), sample_size*no_of_features)NEWLINExdata_loc11 = xdata_loc11.T.reshape(no_of_data_loc11//(sample_size), sample_size*no_of_features)NEWLINExdata_loc12 = xdata_loc12.T.reshape(no_of_data_loc12//(sample_size), sample_size*no_of_features)NEWLINENEWLINENEWLINENEWLINE#CONCATENATING THE DIFFERENT POSE DATA ALONG AXIS 0 (ROW-WISE)NEWLINExdata = np.concatenate([xdata_loc1, xdata_loc2, xdata_loc3, xdata_loc4, xdata_loc5, xdata_loc6, xdata_loc7, xdata_loc8, xdata_loc9, xdata_loc10, xdata_loc11, xdata_loc12], axis=0)NEWLINENEWLINENEWLINENEWLINE# CREATING LABEL FOR THE DATASETSNEWLINEydata_loc1 = np.full(xdata_loc1.shape[0], 0, dtype=int)NEWLINEydata_loc2 = np.full(xdata_loc2.shape[0], 1, dtype=int)NEWLINEydata_loc3 = np.full(xdata_loc3.shape[0], 2, dtype=int)NEWLINEydata_loc4 = np.full(xdata_loc4.shape[0], 3, dtype=int)NEWLINENEWLINEydata_loc5 = np.full(xdata_loc5.shape[0], 4, dtype=int)NEWLINEydata_loc6 = np.full(xdata_loc6.shape[0], 5, dtype=int)NEWLINEydata_loc7 = np.full(xdata_loc7.shape[0], 6, dtype=int)NEWLINEydata_loc8 = np.full(xdata_loc8.shape[0], 7, dtype=int)NEWLINENEWLINEydata_loc9 = np.full(xdata_loc9.shape[0], 8, dtype=int)NEWLINEydata_loc10 = np.full(xdata_loc10.shape[0], 9, dtype=int)NEWLINEydata_loc11 = np.full(xdata_loc11.shape[0], 10, dtype=int)NEWLINEydata_loc12 = np.full(xdata_loc12.shape[0], 11, dtype=int)NEWLINENEWLINE#CONCATENATING THE DIFFERENT POSE LABELS ALONG AXIS 0 (ROW-WISE)NEWLINEydata = np.concatenate([ydata_loc1, ydata_loc2, ydata_loc3, ydata_loc4, ydata_loc5, ydata_loc6, ydata_loc7, ydata_loc8, ydata_loc9, ydata_loc10, ydata_loc11, ydata_loc12], axis=0)NEWLINENEWLINENEWLINE# PREPROCESSING X AND Y DATANEWLINExdata = xdata.astype(float)NEWLINENEWLINEydata = ydata.astype(int).flatten()NEWLINENEWLINE# REMOVING THE NANSNEWLINExdata = np.nan_to_num(xdata)NEWLINENEWLINENEWLINE# ############## RANDOMLY SHUFFLING THE DATA ###################NEWLINE#NEWLINE# first concatenate - TO MAINTAIN THE XDATA AND YDATA MAPPINGNEWLINExydata = np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)NEWLINENEWLINEnp.random.shuffle(xydata)NEWLINENEWLINEprint("Shape of XYDATA", xydata.shape)NEWLINENEWLINExdata_5ft, ydata_5ft = xydata[:,0:sample_size*no_of_features], xydata[:,((sample_size*no_of_features))] # THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 IF YOU DO NOT USE THE MULTI-ANGULAR PROJECTIONNEWLINENEWLINENEWLINE################################################################################################################################NEWLINENEWLINE#############################################################################################################################NEWLINE########                                                                                                              #######NEWLINE########                                       READING THE 10FT DATA                                                  #######NEWLINE########                                                                                                              #######NEWLINE#############################################################################################################################NEWLINENEWLINEdata_file_loc1 =
'/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/0_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER DIRECTLY POINTING TO THE RECEIVERNEWLINEdata_file_loc2 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+30_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+60_10ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+90_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVERNEWLINENEWLINEdata_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+120_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc6 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/+150_10ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/180_10ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVERNEWLINEdata_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-150_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINEdata_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-120_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc10 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-90_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-60_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/10ft/-30_10ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINENEWLINENEWLINEiqdata_loc1 = scipy.fromfile(open(data_file_loc1), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc2 = scipy.fromfile(open(data_file_loc2), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc3 = scipy.fromfile(open(data_file_loc3), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc4 = scipy.fromfile(open(data_file_loc4), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINEiqdata_loc5 = scipy.fromfile(open(data_file_loc5), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc6 = scipy.fromfile(open(data_file_loc6), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc7 = scipy.fromfile(open(data_file_loc7), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc8 = scipy.fromfile(open(data_file_loc8), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINEiqdata_loc9 = scipy.fromfile(open(data_file_loc9), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc10 = scipy.fromfile(open(data_file_loc10), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc11 = scipy.fromfile(open(data_file_loc11), 
dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc12 = scipy.fromfile(open(data_file_loc12), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINENEWLINE# PREPARING THE DATA WITHOUT TIME INFORMATIONNEWLINEno_of_data_loc1 = iqdata_loc1.shape[0]NEWLINEno_of_data_loc2 = iqdata_loc2.shape[0]NEWLINEno_of_data_loc3 = iqdata_loc3.shape[0]NEWLINEno_of_data_loc4 = iqdata_loc4.shape[0]NEWLINENEWLINEno_of_data_loc5 = iqdata_loc5.shape[0]NEWLINEno_of_data_loc6 = iqdata_loc6.shape[0]NEWLINEno_of_data_loc7 = iqdata_loc7.shape[0]NEWLINEno_of_data_loc8 = iqdata_loc8.shape[0]NEWLINENEWLINEno_of_data_loc9 = iqdata_loc9.shape[0]NEWLINEno_of_data_loc10 = iqdata_loc10.shape[0]NEWLINEno_of_data_loc11 = iqdata_loc11.shape[0]NEWLINEno_of_data_loc12 = iqdata_loc12.shape[0]NEWLINENEWLINENEWLINENEWLINE################################################################################################################NEWLINE# CONCATINATING THE I AND Q VALUES VERTICALLY OF (I, Q) SAMPLE. -- note the axis argument is set to 1 (means vertical stacking)NEWLINE# SIMULATNEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTIONNEWLINENEWLINExdata_loc1= np.concatenate([iqdata_loc1['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc1['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc1 = np.matmul(xdata_loc1, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc2= np.concatenate([iqdata_loc2['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc2['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc2 = np.matmul(xdata_loc2, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc3= np.concatenate([iqdata_loc3['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc3['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc3 = np.matmul(xdata_loc3, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc4= np.concatenate([iqdata_loc4['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc4['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc4 = np.matmul(xdata_loc4, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc5= np.concatenate([iqdata_loc5['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc5['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc5 = np.matmul(xdata_loc5, np.transpose(W))NEWLINENEWLINExdata_loc6= np.concatenate([iqdata_loc6['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc6['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc6 = np.matmul(xdata_loc6, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc7= np.concatenate([iqdata_loc7['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc7['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc7 = np.matmul(xdata_loc7, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc8= np.concatenate([iqdata_loc8['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc8['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc8 = np.matmul(xdata_loc8, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc9= np.concatenate([iqdata_loc9['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc9['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc9 = np.matmul(xdata_loc9, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc10= np.concatenate([iqdata_loc10['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc10['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc10 = 
np.matmul(xdata_loc10, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc11= np.concatenate([iqdata_loc11['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc11['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc11 = np.matmul(xdata_loc11, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc12= np.concatenate([iqdata_loc12['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc12['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc12 = np.matmul(xdata_loc12, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINE# RESHAPING THE XDATANEWLINExdata_loc1= xdata_loc1.T.reshape(no_of_data_loc1//(sample_size), sample_size*no_of_features)NEWLINExdata_loc2 = xdata_loc2.T.reshape(no_of_data_loc2//(sample_size), sample_size*no_of_features)NEWLINExdata_loc3 = xdata_loc3.T.reshape(no_of_data_loc3//(sample_size), sample_size*no_of_features)NEWLINExdata_loc4 = xdata_loc4.T.reshape(no_of_data_loc4//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc5= xdata_loc5.T.reshape(no_of_data_loc5//(sample_size), sample_size*no_of_features)NEWLINExdata_loc6 = xdata_loc6.T.reshape(no_of_data_loc6//(sample_size), sample_size*no_of_features)NEWLINExdata_loc7 = xdata_loc7.T.reshape(no_of_data_loc7//(sample_size), sample_size*no_of_features)NEWLINExdata_loc8 = xdata_loc8.T.reshape(no_of_data_loc8//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc9= xdata_loc9.T.reshape(no_of_data_loc9//(sample_size), sample_size*no_of_features)NEWLINExdata_loc10 = xdata_loc10.T.reshape(no_of_data_loc10//(sample_size), sample_size*no_of_features)NEWLINExdata_loc11 = xdata_loc11.T.reshape(no_of_data_loc11//(sample_size), sample_size*no_of_features)NEWLINExdata_loc12 = xdata_loc12.T.reshape(no_of_data_loc12//(sample_size), sample_size*no_of_features)NEWLINENEWLINENEWLINENEWLINE#CONCATINATING THE DIFFERENT POSE DATA HORIZONTALLY (ROWWISE)NEWLINExdata = np.concatenate([xdata_loc1, xdata_loc2, xdata_loc3, xdata_loc4, xdata_loc5, xdata_loc6, xdata_loc7, xdata_loc8, xdata_loc9, xdata_loc10, xdata_loc11, xdata_loc12], axis=0)NEWLINENEWLINENEWLINENEWLINE# CREATING LABEL FOR THE DATASETSNEWLINEydata_loc1 = np.full(xdata_loc1.shape[0], 0, dtype=int)NEWLINEydata_loc2 = np.full(xdata_loc2.shape[0], 1, dtype=int)NEWLINEydata_loc3 = np.full(xdata_loc3.shape[0], 2, dtype=int)NEWLINEydata_loc4 = np.full(xdata_loc4.shape[0], 3, dtype=int)NEWLINENEWLINEydata_loc5 = np.full(xdata_loc5.shape[0], 4, dtype=int)NEWLINEydata_loc6 = np.full(xdata_loc6.shape[0], 5, dtype=int)NEWLINEydata_loc7 = np.full(xdata_loc7.shape[0], 6, dtype=int)NEWLINEydata_loc8 = np.full(xdata_loc8.shape[0], 7, dtype=int)NEWLINENEWLINEydata_loc9 = np.full(xdata_loc9.shape[0], 8, dtype=int)NEWLINEydata_loc10 = np.full(xdata_loc10.shape[0], 9, dtype=int)NEWLINEydata_loc11 = np.full(xdata_loc11.shape[0], 10, dtype=int)NEWLINEydata_loc12 = np.full(xdata_loc12.shape[0], 11, dtype=int)NEWLINENEWLINE#CONCATINATING THE DIFFERENT POSE LABELS HORIZONTALLY (ROWWISE)NEWLINEydata = np.concatenate([ydata_loc1, ydata_loc2, ydata_loc3, ydata_loc4, ydata_loc5, ydata_loc6, ydata_loc7, ydata_loc8, ydata_loc9, ydata_loc10, ydata_loc11, ydata_loc12], axis=0)NEWLINENEWLINENEWLINE# PREPROCESSING X AND Y DATANEWLINExdata =xdata.astype(np.float)NEWLINENEWLINEydata = ydata.astype(np.int).flatten()NEWLINENEWLINE# REMOVING THE NANSNEWLINExdata = np.nan_to_num(xdata)NEWLINENEWLINENEWLINE# ############## RANDOMLY SHUFFLING THE DATA ###################NEWLINE#NEWLINE# first concatinate - TO MAINTIAN THE XDATA AND YDATA MAPPINGNEWLINExydata = 
np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)NEWLINENEWLINEnp.random.shuffle(xydata)NEWLINENEWLINEprint("Shape of XYDATA", xydata.shape)NEWLINENEWLINExdata_10ft, ydata_10ft = xydata[:,0:sample_size*no_of_features], xydata[:,((sample_size*no_of_features))] # THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 OF YOU DO NOT USE MULTI-ANGULAR PROJECTIONNEWLINENEWLINENEWLINE################################################################################################################################NEWLINENEWLINE#############################################################################################################################NEWLINE######## #######NEWLINE######## READING THE 15FT DATA #######NEWLINE######## #######NEWLINE#############################################################################################################################NEWLINENEWLINEdata_file_loc1 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/0_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER DIRECTLY POINTING TO THE RECEIVERNEWLINEdata_file_loc2 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+30_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc3 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+60_15ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc4 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+90_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVERNEWLINENEWLINEdata_file_loc5 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+120_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc6 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/+150_15ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVERNEWLINEdata_file_loc7 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/180_15ft_06_16_2020_914MHz_indoor.dat'# TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVERNEWLINEdata_file_loc8 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-150_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINEdata_file_loc9 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-120_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc10 ='/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-90_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc11 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-60_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVERNEWLINEdata_file_loc12 = '/Users/debashri/Desktop/DirectionFinding_Data/Indoor/DataJune16Indoor/15ft/-30_15ft_06_16_2020_914MHz_indoor.dat' # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVERNEWLINENEWLINENEWLINENEWLINEiqdata_loc1 = scipy.fromfile(open(data_file_loc1), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc2 = scipy.fromfile(open(data_file_loc2), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc3 = 
scipy.fromfile(open(data_file_loc3), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc4 = scipy.fromfile(open(data_file_loc4), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINEiqdata_loc5 = scipy.fromfile(open(data_file_loc5), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc6 = scipy.fromfile(open(data_file_loc6), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc7 = scipy.fromfile(open(data_file_loc7), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc8 = scipy.fromfile(open(data_file_loc8), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINEiqdata_loc9 = scipy.fromfile(open(data_file_loc9), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc10 = scipy.fromfile(open(data_file_loc10), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc11 = scipy.fromfile(open(data_file_loc11), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINEiqdata_loc12 = scipy.fromfile(open(data_file_loc12), dtype=dtype_all, count = sample_size * no_of_samples)NEWLINENEWLINENEWLINENEWLINE# PREPARING THE DATA WITHOUT TIME INFORMATIONNEWLINEno_of_data_loc1 = iqdata_loc1.shape[0]NEWLINEno_of_data_loc2 = iqdata_loc2.shape[0]NEWLINEno_of_data_loc3 = iqdata_loc3.shape[0]NEWLINEno_of_data_loc4 = iqdata_loc4.shape[0]NEWLINENEWLINEno_of_data_loc5 = iqdata_loc5.shape[0]NEWLINEno_of_data_loc6 = iqdata_loc6.shape[0]NEWLINEno_of_data_loc7 = iqdata_loc7.shape[0]NEWLINEno_of_data_loc8 = iqdata_loc8.shape[0]NEWLINENEWLINEno_of_data_loc9 = iqdata_loc9.shape[0]NEWLINEno_of_data_loc10 = iqdata_loc10.shape[0]NEWLINEno_of_data_loc11 = iqdata_loc11.shape[0]NEWLINEno_of_data_loc12 = iqdata_loc12.shape[0]NEWLINENEWLINENEWLINENEWLINE################################################################################################################NEWLINE# CONCATINATING THE I AND Q VALUES VERTICALLY OF (I, Q) SAMPLE. 
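# --- Hedged sketch: a shorter way to split complex I/Q into two real columns ---
# Each np.concatenate([real.reshape(N,1), imag.reshape(N,1)], axis=1) step below can
# also be written as a zero-copy view: a complex64 array viewed as float32 is exactly
# the interleaved (I, Q) layout on disk. Illustrative dummy data only.
import numpy as np

iq = (np.random.randn(6) + 1j * np.random.randn(6)).astype(np.complex64)
iq_cols = iq.view(np.float32).reshape(-1, 2)      # column 0 = I, column 1 = Q
assert np.allclose(iq_cols[:, 0], iq.real) and np.allclose(iq_cols[:, 1], iq.imag)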
-- note the axis argument is set to 1 (means vertical stacking)NEWLINE# SIMULATNEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTIONNEWLINENEWLINExdata_loc1= np.concatenate([iqdata_loc1['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc1['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc1 = np.matmul(xdata_loc1, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc2= np.concatenate([iqdata_loc2['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc2['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc2 = np.matmul(xdata_loc2, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc3= np.concatenate([iqdata_loc3['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc3['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc3 = np.matmul(xdata_loc3, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc4= np.concatenate([iqdata_loc4['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc4['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc4 = np.matmul(xdata_loc4, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc5= np.concatenate([iqdata_loc5['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc5['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc5 = np.matmul(xdata_loc5, np.transpose(W))NEWLINENEWLINExdata_loc6= np.concatenate([iqdata_loc6['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc6['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc6 = np.matmul(xdata_loc6, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc7= np.concatenate([iqdata_loc7['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc7['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc7 = np.matmul(xdata_loc7, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc8= np.concatenate([iqdata_loc8['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc8['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc8 = np.matmul(xdata_loc8, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINExdata_loc9= np.concatenate([iqdata_loc9['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc9['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc9 = np.matmul(xdata_loc9, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc10= np.concatenate([iqdata_loc10['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc10['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc10 = np.matmul(xdata_loc10, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc11= np.concatenate([iqdata_loc11['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc11['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc11 = np.matmul(xdata_loc11, np.transpose(W))NEWLINENEWLINENEWLINExdata_loc12= np.concatenate([iqdata_loc12['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc12['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)NEWLINExdata_loc12 = np.matmul(xdata_loc12, np.transpose(W))NEWLINENEWLINENEWLINENEWLINENEWLINE# RESHAPING THE XDATANEWLINExdata_loc1= xdata_loc1.T.reshape(no_of_data_loc1//(sample_size), sample_size*no_of_features)NEWLINExdata_loc2 = xdata_loc2.T.reshape(no_of_data_loc2//(sample_size), sample_size*no_of_features)NEWLINExdata_loc3 = xdata_loc3.T.reshape(no_of_data_loc3//(sample_size), sample_size*no_of_features)NEWLINExdata_loc4 = xdata_loc4.T.reshape(no_of_data_loc4//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc5= 
xdata_loc5.T.reshape(no_of_data_loc5//(sample_size), sample_size*no_of_features)NEWLINExdata_loc6 = xdata_loc6.T.reshape(no_of_data_loc6//(sample_size), sample_size*no_of_features)NEWLINExdata_loc7 = xdata_loc7.T.reshape(no_of_data_loc7//(sample_size), sample_size*no_of_features)NEWLINExdata_loc8 = xdata_loc8.T.reshape(no_of_data_loc8//(sample_size), sample_size*no_of_features)NEWLINENEWLINExdata_loc9= xdata_loc9.T.reshape(no_of_data_loc9//(sample_size), sample_size*no_of_features)NEWLINExdata_loc10 = xdata_loc10.T.reshape(no_of_data_loc10//(sample_size), sample_size*no_of_features)NEWLINExdata_loc11 = xdata_loc11.T.reshape(no_of_data_loc11//(sample_size), sample_size*no_of_features)NEWLINExdata_loc12 = xdata_loc12.T.reshape(no_of_data_loc12//(sample_size), sample_size*no_of_features)NEWLINENEWLINENEWLINENEWLINE#CONCATINATING THE DIFFERENT POSE DATA HORIZONTALLY (ROWWISE)NEWLINExdata = np.concatenate([xdata_loc1, xdata_loc2, xdata_loc3, xdata_loc4, xdata_loc5, xdata_loc6, xdata_loc7, xdata_loc8, xdata_loc9, xdata_loc10, xdata_loc11, xdata_loc12], axis=0)NEWLINENEWLINENEWLINENEWLINE# CREATING LABEL FOR THE DATASETSNEWLINEydata_loc1 = np.full(xdata_loc1.shape[0], 0, dtype=int)NEWLINEydata_loc2 = np.full(xdata_loc2.shape[0], 1, dtype=int)NEWLINEydata_loc3 = np.full(xdata_loc3.shape[0], 2, dtype=int)NEWLINEydata_loc4 = np.full(xdata_loc4.shape[0], 3, dtype=int)NEWLINENEWLINEydata_loc5 = np.full(xdata_loc5.shape[0], 4, dtype=int)NEWLINEydata_loc6 = np.full(xdata_loc6.shape[0], 5, dtype=int)NEWLINEydata_loc7 = np.full(xdata_loc7.shape[0], 6, dtype=int)NEWLINEydata_loc8 = np.full(xdata_loc8.shape[0], 7, dtype=int)NEWLINENEWLINEydata_loc9 = np.full(xdata_loc9.shape[0], 8, dtype=int)NEWLINEydata_loc10 = np.full(xdata_loc10.shape[0], 9, dtype=int)NEWLINEydata_loc11 = np.full(xdata_loc11.shape[0], 10, dtype=int)NEWLINEydata_loc12 = np.full(xdata_loc12.shape[0], 11, dtype=int)NEWLINENEWLINE#CONCATINATING THE DIFFERENT POSE LABELS HORIZONTALLY (ROWWISE)NEWLINEydata = np.concatenate([ydata_loc1, ydata_loc2, ydata_loc3, ydata_loc4, ydata_loc5, ydata_loc6, ydata_loc7, ydata_loc8, ydata_loc9, ydata_loc10, ydata_loc11, ydata_loc12], axis=0)NEWLINENEWLINENEWLINE# PREPROCESSING X AND Y DATANEWLINExdata =xdata.astype(np.float)NEWLINENEWLINEydata = ydata.astype(np.int).flatten()NEWLINENEWLINE# REMOVING THE NANSNEWLINExdata = np.nan_to_num(xdata)NEWLINENEWLINENEWLINE# ############## RANDOMLY SHUFFLING THE DATA ###################NEWLINE#NEWLINE# first concatinate - TO MAINTIAN THE XDATA AND YDATA MAPPINGNEWLINExydata = np.concatenate([xdata.reshape(xdata.shape[0], xdata.shape[1]), ydata.reshape(ydata.shape[0], 1)], axis=1)NEWLINENEWLINEnp.random.shuffle(xydata)NEWLINENEWLINEprint("Shape of XYDATA", xydata.shape)NEWLINENEWLINExdata_15ft, ydata_15ft = xydata[:,0:sample_size*no_of_features], xydata[:,((sample_size*no_of_features))] # THE LAST COLUMN IS THE YDATA # USE 2 INSTEAD OF 8 OF YOU DO NOT USE MULTI-ANGULAR PROJECTIONNEWLINENEWLINENEWLINE################################################################################################################################NEWLINENEWLINE#############################################################################################################################NEWLINE######## #######NEWLINE######## HIERARCHICAL TRAINING- FIRST STEP #######NEWLINE######## FIRST CLASSIFYING THE DATA BASED ON DISTANCES #######NEWLINE######## PREDICTING DISTANCE BETWEEN THE RECEIVER AND TRANSMITTER #######NEWLINE######## 
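# --- Hedged sketch: shuffling x and y together without concatenating them ---
# The script glues xdata and ydata into one float array before np.random.shuffle,
# which silently casts the integer labels to float. Indexing both arrays with one
# random permutation keeps the row pairing and the dtypes intact. Dummy shapes only.
import numpy as np

x_demo = np.random.randn(8, 3)
y_demo = np.arange(8)
perm = np.random.permutation(x_demo.shape[0])
x_shuf, y_shuf = x_demo[perm], y_demo[perm]       # same row order for both arrays
assert y_shuf.dtype == y_demo.dtype               # labels stay integers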
#######NEWLINE#############################################################################################################################NEWLINENEWLINENEWLINExdata_ranging = np.concatenate([xdata_5ft, xdata_10ft, xdata_15ft], axis= 0 )NEWLINENEWLINE# CREATING LABEL FOR THE DATASETSNEWLINEydata_range1 = np.full(xdata_5ft.shape[0], 0, dtype=int)NEWLINEydata_range2 = np.full(xdata_10ft.shape[0], 1, dtype=int)NEWLINEydata_range3 = np.full(xdata_15ft.shape[0], 2, dtype=int)NEWLINENEWLINE#CONCATINATING THE DIFFERENT POSE LABELS HORIZONTALLY (ROWWISE)NEWLINEydata_ranging = np.concatenate([ydata_range1, ydata_range2, ydata_range3], axis=0)NEWLINENEWLINE#################### NORMALIZE THE X DATA #######################NEWLINENEWLINENEWLINEstandard = preprocessing.StandardScaler().fit(xdata_ranging) # Normalize the data with zero mean and unit variance for each columnNEWLINExdata_ranging = standard.transform(xdata_ranging)NEWLINENEWLINENEWLINENEWLINE############### SEPARATING TRAIN AND TEST DATA #######################NEWLINEprint("############## STARTING THE TRAINING TO PREDICT THE RANGE BETWEEN RECEIVER AND TRANSMITTER ##########################")NEWLINENEWLINExtrain_ranging, xtest_ranging, ytrain_ranging, ytest_ranging = train_test_split(xdata_ranging, ydata_ranging, test_size=0.2, shuffle = True, random_state=42) # Randomly shuffling and 80/20 is train/test sizeNEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_ranging.shape, xtest_ranging.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_ranging.shape, ytest_ranging.shape)NEWLINENEWLINE# RESHAPING THE DATA FROM 2 DIMENSIONAL TO 4 DIMENSIONAL SHAPE - NEEDED TO APPLY TO USE 2D-CONVOLUTIONNEWLINE# reshape to be [samples][width][height][channels]NEWLINExtrain_ranging = xtrain_ranging.reshape((xtrain_ranging.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINExtest_ranging = xtest_ranging.reshape((xtest_ranging.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINENEWLINENEWLINEnum_classes = 3 # TOTAL NUMBER OF RANGESNEWLINENEWLINENEWLINENEWLINE# Convert labels to categorical one-hot encodingNEWLINEytrain_ranging_one_hot = to_categorical(ytrain_ranging, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABELNEWLINEytest_ranging_one_hot = to_categorical(ytest_ranging, num_classes=num_classes)NEWLINENEWLINENEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_ranging.shape, xtest_ranging.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_ranging_one_hot.shape, ytest_ranging_one_hot.shape)NEWLINENEWLINE############################################################NEWLINE# #NEWLINE######## Building a 2D Convolutional Neural Network #####NEWLINE# #NEWLINE############################################################NEWLINENEWLINEdr = 0.6 # dropout rate (%)NEWLINEbatch_size = 128 # Mini batch sizeNEWLINEnb_epoch = 100 # Number of Epoch (Give a higher number to get better accuracy)NEWLINENEWLINEclasses = ["5ft", "10ft", "15ft"] # CHANGE LABELNEWLINEin_shp = list(xtrain_ranging.shape[1:]) # Input DimensionNEWLINEprint(in_shp)NEWLINE# model = models.Sequential()NEWLINEtimesteps=1NEWLINEdata_dim=xtrain_ranging.shape[1]NEWLINENEWLINENEWLINENEWLINE# print ("AFTER RESHAPE")NEWLINEytrain_ranging_one_hot = np.reshape(ytrain_ranging_one_hot, (ytrain_ranging_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_ranging_one_hot = np.reshape(ytest_ranging_one_hot, (ytest_ranging_one_hot.shape[0], num_classes)) # Used in trainingNEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution 
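# --- Hedged sketch: fitting the scaler on training data only ---
# Above, StandardScaler is fit on the full ranging set before train_test_split, so
# test-set statistics leak into the normalization. A leakage-free variant (not what
# the script does) fits on the training split and only transforms the test split.
# Dummy data below; shapes are illustrative.
import numpy as np
from sklearn import preprocessing
from sklearn.model_selection import train_test_split

x_all = np.random.randn(100, 5)
y_all = np.random.randint(0, 3, 100)
x_tr, x_te, y_tr, y_te = train_test_split(x_all, y_all, test_size=0.2, random_state=42)
scaler = preprocessing.StandardScaler().fit(x_tr)   # statistics from train only
x_tr, x_te = scaler.transform(x_tr), scaler.transform(x_te)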
# Modeling the CNNNEWLINEmodel_ranging = Sequential()
NEWLINENEWLINE# FIRST CONVOLUTIONAL LAYERNEWLINEmodel_ranging.add(Conv2D(128, (2, 2), input_shape=(no_of_features, sample_size, 1), activation='relu')) # CHANGE # Stride (1, 1)NEWLINEmodel_ranging.add(MaxPooling2D()) # Pool size: (2, 2) and stride (2, 2)NEWLINEmodel_ranging.add(Dropout(0.2))
NEWLINENEWLINE# SECOND CONVOLUTIONAL LAYERNEWLINEmodel_ranging.add(Conv2D(64, (2, 2), activation='relu'))NEWLINEmodel_ranging.add(MaxPooling2D())NEWLINEmodel_ranging.add(Dropout(dr))NEWLINENEWLINEmodel_ranging.add(Flatten())
NEWLINENEWLINE# FIRST DENSE LAYERNEWLINEmodel_ranging.add(Dense(256, activation='relu'))NEWLINENEWLINE# SECOND DENSE LAYERNEWLINEmodel_ranging.add(Dense(128, activation='relu'))NEWLINENEWLINE# OUTPUT LAYERNEWLINEmodel_ranging.add(Dense(num_classes, activation='softmax'))
NEWLINENEWLINE# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)
NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE# model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with the sgd optimizer # CHANGE
NEWLINENEWLINEmodel_ranging.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])
NEWLINENEWLINEmodel_ranging.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_data_ranging_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)
NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
NEWLINEhistory = model_ranging.fit(xtrain_ranging, ytrain_ranging_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE    keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE    keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
NEWLINENEWLINENEWLINENEWLINE# SAVING THE MODEL FOR TRANSFER LEARNINGNEWLINEsaved_file = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/2D_CNN_ranging_classifier.h5'NEWLINEmodel_ranging.save(saved_file) # SAVING THE MODEL FOR TRANSFER LEARNING
NEWLINENEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_ranging.evaluate(xtest_ranging, ytest_ranging_one_hot, batch_size=batch_size) # CHANGE
NEWLINEprint("\nTest Loss: %s: %.4f" % (model_ranging.metrics_names[0], loss)) # raw loss value; it is not a percentageNEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_ranging.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_ranging.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_ranging.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_ranging.metrics_names[4], recall * 100)) # CHANGENEWLINENEWLINE
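# [Editor's note] f1_m, precision_m and recall_m are custom Keras metrics defined earlierNEWLINE# in the full script (they are re-registered via custom_objects when the saved model isNEWLINE# reloaded below). For reference, the widely used backend implementations look like thisNEWLINE# -- a sketch, not necessarily the author's exact definitions:NEWLINE#   from keras import backend as KNEWLINE#   def recall_m(y_true, y_pred):NEWLINE#       tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))NEWLINE#       possible = K.sum(K.round(K.clip(y_true, 0, 1)))NEWLINE#       return tp / (possible + K.epsilon())NEWLINE#   def precision_m(y_true, y_pred):NEWLINE#       tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))NEWLINE#       predicted = K.sum(K.round(K.clip(y_pred, 0, 1)))NEWLINE#       return tp / (predicted + K.epsilon())NEWLINE#   def f1_m(y_true, y_pred):NEWLINE#       p, r = precision_m(y_true, y_pred), recall_m(y_true, y_pred)NEWLINE#       return 2 * (p * r) / (p + r + K.epsilon())NEWLINENEWLINE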
# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))
NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE          'axes.labelsize': 'xx-large',NEWLINE          'axes.titlesize': 'xx-large',NEWLINE          'xtick.labelsize': 'xx-large',NEWLINE          'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)
NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy') # Keras reports accuracy as a fraction in [0, 1], not a percentageNEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_ranging_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)
NEWLINENEWLINENEWLINE# plt.show()
NEWLINENEWLINENEWLINEdef plot_confusion_matrix(cm, title='Confusion Matrix', cmap=plt.cm.YlGnBu, labels=[], normalize=False, filedest=''):NEWLINE    if normalize:NEWLINE        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]NEWLINE        # print("Normalized confusion matrix")NEWLINE    else:NEWLINE        cm = cm.astype('int')NEWLINE        # print('Confusion matrix, without normalization')
NEWLINE    plt.rcParams.update(params) # ADDEDNEWLINE    fig = plt.figure(figsize=(12, 12))NEWLINE    plt.imshow(cm, interpolation='nearest', cmap=cmap)NEWLINE    # plt.title(title)NEWLINE    plt.colorbar()NEWLINE    tick_marks = np.arange(len(labels))NEWLINE    plt.xticks(tick_marks, labels, rotation=45)NEWLINE    plt.yticks(tick_marks, labels)NEWLINE    thresh = cm.max() / 2NEWLINE    fmt = '.2f' if normalize else 'd'
NEWLINE    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):NEWLINE        # plt.text(j, i,"{:,}".format(cm[i, j]),NEWLINE        plt.text(j, i, format(cm[i, j], fmt),NEWLINE                 horizontalalignment="center", fontsize="xx-large",NEWLINE                 color="white" if cm[i, j] > thresh else "black")
NEWLINENEWLINE    plt.ylabel('True label')NEWLINE    plt.xlabel('Predicted label')NEWLINE    # fig, ax = plt.subplots(nrows=1, ncols=1) # create figure & 1 axisNEWLINE    # ax.plot([0, 1, 2], [10, 20, 3])NEWLINE    plt.tight_layout()NEWLINE    fig.savefig(filedest) # save the figure to fileNEWLINE    plt.close(fig)
NEWLINENEWLINENEWLINE# plt.show()
NEWLINENEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_ranging.predict(xtest_ranging, batch_size=batch_size)NEWLINEconf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_ranging.shape[0]):NEWLINE    j = list(ytest_ranging_one_hot[i, :]).index(1)NEWLINE    k = int(np.argmax(test_Y_hat[i, :]))NEWLINE    conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/direction_ranging_conf_mat_2D_CNN_Mapping.png')NEWLINENEWLINE
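# [Editor's note] The loop above accumulates the confusion matrix by hand from the one-hotNEWLINE# labels and the softmax outputs. An equivalent one-liner (a sketch using scikit-learn,NEWLINE# which the script already imports for preprocessing and train_test_split):NEWLINE#   from sklearn.metrics import confusion_matrixNEWLINE#   conf = confusion_matrix(ytest_ranging_one_hot.argmax(axis=1), test_Y_hat.argmax(axis=1))NEWLINENEWLINE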
#############################################################################################################################NEWLINE########                                                                                                             #######NEWLINE########                                  HIERARCHICAL TRAINING - SECOND STEP                                       #######NEWLINE########                      CLASSIFYING THE DATA BASED ON POSES OF TRANSMITTER ANTENNA                            #######NEWLINE########                         PREDICTING RELATIVE POSES OF TRANSMITTER ANTENNA                                   #######NEWLINE########                                           DISTANCE: 5FT                                                    #######NEWLINE#############################################################################################################################
NEWLINENEWLINENEWLINE#################### NORMALIZE THE X DATA #######################NEWLINENEWLINENEWLINEstandard = preprocessing.StandardScaler().fit(xdata_5ft) # Normalize the data with zero mean and unit variance for each columnNEWLINExdata_5ft = standard.transform(xdata_5ft)
NEWLINENEWLINEprint("############## STARTING THE TRAINING TO PREDICT THE POSES OF TRANSMITTER ANTENNA WITH 5 FT DISTANCE FROM RECEIVER ##########################")
NEWLINENEWLINENEWLINENEWLINE############### SEPARATING TRAIN AND TEST DATA #######################
NEWLINENEWLINExtrain_5ft, xtest_5ft, ytrain_5ft, ytest_5ft = train_test_split(xdata_5ft, ydata_5ft, test_size=0.2, shuffle=True, random_state=42) # Random shuffle; 80/20 train/test splitNEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_5ft.shape, xtest_5ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_5ft.shape, ytest_5ft.shape)
NEWLINENEWLINE# RESHAPING THE DATA FROM 2 DIMENSIONS TO 4 DIMENSIONS - NEEDED TO APPLY 2D CONVOLUTIONNEWLINE# reshape to be [samples][width][height][channels]NEWLINExtrain_5ft = xtrain_5ft.reshape((xtrain_5ft.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINExtest_5ft = xtest_5ft.reshape((xtest_5ft.shape[0], no_of_features, sample_size, 1)).astype('float32')
NEWLINENEWLINENEWLINEnum_classes = 12 # TOTAL NUMBER OF POSESNEWLINENEWLINENEWLINENEWLINE# Convert labels to categorical one-hot encodingNEWLINEytrain_5ft_one_hot = to_categorical(ytrain_5ft, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABELNEWLINEytest_5ft_one_hot = to_categorical(ytest_5ft, num_classes=num_classes)
NEWLINENEWLINENEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_5ft.shape, xtest_5ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_5ft_one_hot.shape, ytest_5ft_one_hot.shape)
NEWLINENEWLINE############################################################NEWLINE#                                                          #NEWLINE########  Building a 2D Convolutional Neural Network  #####NEWLINE#                                                          #NEWLINE############################################################
NEWLINENEWLINEdr = 0.6 # dropout rate (fraction)NEWLINEbatch_size = 128 # Mini batch sizeNEWLINEnb_epoch = 100 # Number of epochs (increase for better accuracy)
NEWLINENEWLINEclasses = ["0", "+30", "+60", "+90", "+120", "+150", "180", "-150", "-120", "-90", "-60", "-30"] # CHANGE LABELNEWLINEin_shp = list(xtrain_5ft.shape[1:]) # Input DimensionNEWLINEprint(in_shp)NEWLINE# model = models.Sequential()NEWLINEtimesteps = 1NEWLINEdata_dim = xtrain_5ft.shape[1]
NEWLINENEWLINENEWLINENEWLINE# print ("AFTER RESHAPE")NEWLINEytrain_5ft_one_hot = np.reshape(ytrain_5ft_one_hot, (ytrain_5ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_5ft_one_hot = np.reshape(ytest_5ft_one_hot, (ytest_5ft_one_hot.shape[0], num_classes)) # Used in training
NEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution time
NEWLINENEWLINE# IMPLEMENTING THE TRANSFER LEARNINGNEWLINE# source_model = load_model(saved_file)NEWLINE# loading the previously saved ranging model (the custom metrics must be re-registered)NEWLINEsource_model = load_model(saved_file, custom_objects={NEWLINE    "f1_m": f1_m,NEWLINE    "precision_m": precision_m,NEWLINE    "recall_m": recall_mNEWLINE    })
NEWLINENEWLINEmodel_pose = Sequential()NEWLINEfor layer in source_model.layers[:-1]: # copy every layer except the old output layerNEWLINE    model_pose.add(layer)
NEWLINENEWLINENEWLINE# ADDING OUTPUT LAYERNEWLINEmodel_pose.add(Dense(num_classes, activation='softmax'))NEWLINENEWLINE
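# [Editor's note] The copied backbone stays trainable here, so the whole network isNEWLINE# fine-tuned on the pose data. To keep the pretrained ranging features frozen insteadNEWLINE# (a common transfer-learning variant, not what the original script does), one couldNEWLINE# mark the copied layers before compiling:NEWLINE#   for layer in model_pose.layers[:-1]:NEWLINE#       layer.trainable = FalseNEWLINENEWLINE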
# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)
NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE# model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with the sgd optimizer # CHANGE
NEWLINENEWLINEmodel_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])
NEWLINENEWLINEmodel_pose.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)
NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
NEWLINEhistory = model_pose.fit(xtrain_5ft, ytrain_5ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE    keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE    keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
NEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_5ft, ytest_5ft_one_hot, batch_size=batch_size) # CHANGE
NEWLINEprint("\nTest Loss: %s: %.4f" % (model_pose.metrics_names[0], loss)) # raw loss value; it is not a percentageNEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100)) # CHANGE
NEWLINENEWLINE# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))
NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE          'axes.labelsize': 'xx-large',NEWLINE          'axes.titlesize': 'xx-large',NEWLINE          'xtick.labelsize': 'xx-large',NEWLINE          'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)
NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy')NEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_12_poses_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)
NEWLINENEWLINENEWLINE# plt.show()
NEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_pose.predict(xtest_5ft, batch_size=batch_size)NEWLINE
conf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_5ft.shape[0]):NEWLINE    j = list(ytest_5ft_one_hot[i, :]).index(1)NEWLINE    k = int(np.argmax(test_Y_hat[i, :]))NEWLINE    conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/5ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')
NEWLINENEWLINENEWLINE#############################################################################################################################NEWLINE########                                                                                                             #######NEWLINE########                                  HIERARCHICAL TRAINING - SECOND STEP                                       #######NEWLINE########                      CLASSIFYING THE DATA BASED ON POSES OF TRANSMITTER ANTENNA                            #######NEWLINE########                         PREDICTING RELATIVE POSES OF TRANSMITTER ANTENNA                                   #######NEWLINE########                                           DISTANCE: 10FT                                                   #######NEWLINE#############################################################################################################################
NEWLINENEWLINENEWLINE#################### NORMALIZE THE X DATA #######################NEWLINENEWLINENEWLINEstandard = preprocessing.StandardScaler().fit(xdata_10ft) # Normalize the data with zero mean and unit variance for each columnNEWLINExdata_10ft = standard.transform(xdata_10ft)
NEWLINENEWLINEprint("############## STARTING THE TRAINING TO PREDICT THE POSES OF TRANSMITTER ANTENNA WITH 10 FT DISTANCE FROM RECEIVER ##########################")
NEWLINENEWLINENEWLINE############### SEPARATING TRAIN AND TEST DATA #######################
NEWLINENEWLINExtrain_10ft, xtest_10ft, ytrain_10ft, ytest_10ft = train_test_split(xdata_10ft, ydata_10ft, test_size=0.2, shuffle=True, random_state=42) # Random shuffle; 80/20 train/test splitNEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_10ft.shape, xtest_10ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_10ft.shape, ytest_10ft.shape)
NEWLINENEWLINE# RESHAPING THE DATA FROM 2 DIMENSIONS TO 4 DIMENSIONS - NEEDED TO APPLY 2D CONVOLUTIONNEWLINE# reshape to be [samples][width][height][channels]NEWLINExtrain_10ft = xtrain_10ft.reshape((xtrain_10ft.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINExtest_10ft = xtest_10ft.reshape((xtest_10ft.shape[0], no_of_features, sample_size, 1)).astype('float32')
NEWLINENEWLINENEWLINEnum_classes = 12 # TOTAL NUMBER OF POSESNEWLINENEWLINENEWLINENEWLINE# Convert labels to categorical one-hot encodingNEWLINEytrain_10ft_one_hot = to_categorical(ytrain_10ft, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABELNEWLINEytest_10ft_one_hot = to_categorical(ytest_10ft, num_classes=num_classes)
NEWLINENEWLINENEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_10ft.shape, xtest_10ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_10ft_one_hot.shape, ytest_10ft_one_hot.shape)
NEWLINENEWLINE############################################################NEWLINE#                                                          #NEWLINE########  Building a 2D Convolutional Neural Network  #####NEWLINE#                                                          #NEWLINE############################################################
NEWLINENEWLINEdr = 0.6 # dropout rate (fraction)NEWLINEbatch_size = 128 # Mini batch sizeNEWLINEnb_epoch = 100 # Number of epochs (increase for better accuracy)
NEWLINENEWLINEclasses = ["0", "+30", "+60", "+90", "+120", "+150", "180", "-150", "-120", "-90", "-60", "-30"] # CHANGE LABELNEWLINEin_shp = list(xtrain_10ft.shape[1:]) # Input DimensionNEWLINEprint(in_shp)NEWLINE# model = models.Sequential()NEWLINEtimesteps = 1NEWLINEdata_dim = xtrain_10ft.shape[1]
NEWLINENEWLINENEWLINENEWLINE# print ("AFTER RESHAPE")NEWLINE
RESHAPE")NEWLINEytrain_10ft_one_hot = np.reshape(ytrain_10ft_one_hot, (ytrain_10ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_10ft_one_hot = np.reshape(ytest_10ft_one_hot, (ytest_10ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution timeNEWLINENEWLINE# IMPLEMENTING THE TRANSFER LEARNINGNEWLINE#source_model = load_model(saved_file)NEWLINE# loading the previously saved modelNEWLINEsource_model = load_model(saved_file, custom_objects={NEWLINE "f1_m": f1_m,NEWLINE "precision_m": precision_m,NEWLINE "recall_m": recall_mNEWLINE })NEWLINENEWLINEmodel_pose = Sequential()NEWLINEfor layer in source_model.layers[:-1]: # go through until last layerNEWLINE model_pose.add(layer)NEWLINENEWLINENEWLINE# ADDING OUTPUT LAYERNEWLINEmodel_pose.add(Dense(num_classes, activation='softmax'))NEWLINENEWLINE# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE#model.compile(optimizer='sgd', loss='categorical_crossentropy',metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with rms adam optimizer # CHANGENEWLINENEWLINEmodel_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])NEWLINENEWLINEmodel_pose.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINEhistory = model_pose.fit(xtrain_10ft, ytrain_10ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_10ft, ytest_10ft_one_hot, batch_size=batch_size) # CHANGENEWLINEprint("\nTest Loss: %s: %.2f%%" % (model_pose.metrics_names[0], loss * 100)) # CHANGENEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100)) # CHANGENEWLINENEWLINE# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE 'axes.labelsize': 'xx-large',NEWLINE 'axes.titlesize': 
          'xtick.labelsize': 'xx-large',NEWLINE          'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)
NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy')NEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_12_poses_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)
NEWLINENEWLINENEWLINE# plt.show()
NEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_pose.predict(xtest_10ft, batch_size=batch_size)NEWLINEconf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_10ft.shape[0]):NEWLINE    j = list(ytest_10ft_one_hot[i, :]).index(1)NEWLINE    k = int(np.argmax(test_Y_hat[i, :]))NEWLINE    conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/10ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')
NEWLINENEWLINENEWLINE#############################################################################################################################NEWLINE########                                                                                                             #######NEWLINE########                                  HIERARCHICAL TRAINING - SECOND STEP                                       #######NEWLINE########                      CLASSIFYING THE DATA BASED ON POSES OF TRANSMITTER ANTENNA                            #######NEWLINE########                         PREDICTING RELATIVE POSES OF TRANSMITTER ANTENNA                                   #######NEWLINE########                                           DISTANCE: 15FT                                                   #######NEWLINE#############################################################################################################################
NEWLINENEWLINENEWLINE#################### NORMALIZE THE X DATA #######################NEWLINENEWLINENEWLINEstandard = preprocessing.StandardScaler().fit(xdata_15ft) # Normalize the data with zero mean and unit variance for each columnNEWLINExdata_15ft = standard.transform(xdata_15ft)
NEWLINENEWLINEprint("############## STARTING THE TRAINING TO PREDICT THE POSES OF TRANSMITTER ANTENNA WITH 15 FT DISTANCE FROM RECEIVER ##########################")
NEWLINENEWLINENEWLINE############### SEPARATING TRAIN AND TEST DATA #######################
NEWLINENEWLINExtrain_15ft, xtest_15ft, ytrain_15ft, ytest_15ft = train_test_split(xdata_15ft, ydata_15ft, test_size=0.2, shuffle=True, random_state=42) # Random shuffle; 80/20 train/test splitNEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_15ft.shape, xtest_15ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_15ft.shape, ytest_15ft.shape)
NEWLINENEWLINE# RESHAPING THE DATA FROM 2 DIMENSIONS TO 4 DIMENSIONS - NEEDED TO APPLY 2D CONVOLUTIONNEWLINE# reshape to be [samples][width][height][channels]NEWLINExtrain_15ft = xtrain_15ft.reshape((xtrain_15ft.shape[0], no_of_features, sample_size, 1)).astype('float32')NEWLINExtest_15ft = xtest_15ft.reshape((xtest_15ft.shape[0], no_of_features, sample_size, 1)).astype('float32')
NEWLINENEWLINENEWLINEnum_classes = 12 # TOTAL NUMBER OF POSESNEWLINENEWLINENEWLINENEWLINE# Convert labels to categorical one-hot encodingNEWLINEytrain_15ft_one_hot = to_categorical(ytrain_15ft, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABELNEWLINEytest_15ft_one_hot = to_categorical(ytest_15ft, num_classes=num_classes)
NEWLINENEWLINENEWLINEprint("XTRAIN AND XTEST SHAPE:", xtrain_15ft.shape, xtest_15ft.shape)NEWLINE
XTEST SHAPE:", xtrain_15ft.shape, xtest_15ft.shape)NEWLINEprint("YTRAIN AND YTEST SHAPE:", ytrain_15ft_one_hot.shape, ytest_15ft_one_hot.shape)NEWLINENEWLINE############################################################NEWLINE# #NEWLINE######## Building a 2D Convolutional Neural Network #####NEWLINE# #NEWLINE############################################################NEWLINENEWLINEdr = 0.6 # dropout rate (%)NEWLINEbatch_size = 128 # Mini batch sizeNEWLINEnb_epoch = 100 # Number of Epoch (Give a higher number to get better accuracy)NEWLINENEWLINEclasses = ["0", "+30", "+60", "+90", "+120", "+150", "180", "-150", "-120", "-90", "-60", "-30"] # CHANGE LABELNEWLINEin_shp = list(xtrain_15ft.shape[1:]) # Input DimensionNEWLINEprint(in_shp)NEWLINE# model = models.Sequential()NEWLINEtimesteps=1NEWLINEdata_dim=xtrain_15ft.shape[1]NEWLINENEWLINENEWLINENEWLINE# print ("AFTER RESHAPE")NEWLINEytrain_15ft_one_hot = np.reshape(ytrain_15ft_one_hot, (ytrain_15ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINEytest_15ft_one_hot = np.reshape(ytest_15ft_one_hot, (ytest_15ft_one_hot.shape[0], num_classes)) # Used in trainingNEWLINENEWLINEstart_time = time.time() # Taking start time to calculate overall execution timeNEWLINENEWLINE# IMPLEMENTING THE TRANSFER LEARNINGNEWLINE#source_model = load_model(saved_file)NEWLINE# loading the previously saved modelNEWLINEsource_model = load_model(saved_file, custom_objects={NEWLINE "f1_m": f1_m,NEWLINE "precision_m": precision_m,NEWLINE "recall_m": recall_mNEWLINE })NEWLINENEWLINEmodel_pose = Sequential()NEWLINEfor layer in source_model.layers[:-1]: # go through until last layerNEWLINE model_pose.add(layer)NEWLINENEWLINENEWLINE# ADDING OUTPUT LAYERNEWLINEmodel_pose.add(Dense(num_classes, activation='softmax'))NEWLINENEWLINE# Compile modelNEWLINE# For a multi-class classification problemNEWLINEsgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)NEWLINEadam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)NEWLINENEWLINE# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmspropNEWLINENEWLINE#model.compile(optimizer='sgd', loss='categorical_crossentropy',metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with rms adam optimizer # CHANGENEWLINENEWLINEmodel_pose.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])NEWLINENEWLINEmodel_pose.summary()NEWLINEfilepath = '/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_data_12_poses_2D_CNN_Mapping.wts.h5'NEWLINEprint("The dropout rate was: ")NEWLINEprint(dr)NEWLINENEWLINENEWLINE# Fit the modelNEWLINE# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINEhistory = model_pose.fit(xtrain_15ft, ytrain_15ft_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[NEWLINE keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),NEWLINE keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])NEWLINENEWLINENEWLINE# Evaluate the modelNEWLINEloss, accuracy, f1_score, precision, recall = model_pose.evaluate(xtest_15ft, ytest_15ft_one_hot, batch_size=batch_size) # 
print("\nTest Loss: %s: %.4f" % (model_pose.metrics_names[0], loss)) # raw loss value; it is not a percentageNEWLINEprint("\nTest Accuracy: %s: %.2f%%" % (model_pose.metrics_names[1], accuracy * 100)) # CHANGENEWLINEprint("\nTest F1 Score: %s: %.2f" % (model_pose.metrics_names[2], f1_score)) # CHANGENEWLINEprint("\nTest Precision: %s: %.2f%%" % (model_pose.metrics_names[3], precision * 100)) # CHANGENEWLINEprint("\nTest Recall: %s: %.2f%%" % (model_pose.metrics_names[4], recall * 100)) # CHANGE
NEWLINENEWLINE# Calculating total execution timeNEWLINEend_time = time.time() # Taking end time to calculate overall execution timeNEWLINEprint("\n Total Execution Time (Minutes): ")NEWLINEprint(((end_time - start_time) / 60))
NEWLINENEWLINE#### SET PLOTTING PARAMETERS #########NEWLINEparams = {'legend.fontsize': 'xx-large',NEWLINE          'axes.labelsize': 'xx-large',NEWLINE          'axes.titlesize': 'xx-large',NEWLINE          'xtick.labelsize': 'xx-large',NEWLINE          'ytick.labelsize': 'xx-large'}NEWLINEplt.rcParams.update(params)
NEWLINENEWLINENEWLINE# Show Accuracy CurvesNEWLINEfig = plt.figure()NEWLINE# plt.title('Training Performance')NEWLINEplt.plot(history.epoch, history.history['acc'], label='Training Accuracy', linewidth=2.0, c='b')NEWLINEplt.plot(history.epoch, history.history['val_acc'], label='Validation Accuracy', linewidth=2.0, c='r')NEWLINEplt.ylabel('Accuracy')NEWLINEplt.xlabel('Epoch')NEWLINEplt.legend()NEWLINEplt.tight_layout()NEWLINEfig.savefig('/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_12_poses_acc_2D_CNN_Mapping.png') # save the figure to fileNEWLINEplt.close(fig)
NEWLINENEWLINENEWLINE# plt.show()
NEWLINENEWLINENEWLINE# Plot confusion matrixNEWLINEtest_Y_hat = model_pose.predict(xtest_15ft, batch_size=batch_size)NEWLINEconf = np.zeros([len(classes), len(classes)])NEWLINEconfnorm = np.zeros([len(classes), len(classes)])NEWLINEfor i in range(0, xtest_15ft.shape[0]):NEWLINE    j = list(ytest_15ft_one_hot[i, :]).index(1)NEWLINE    k = int(np.argmax(test_Y_hat[i, :]))NEWLINE    conf[j, k] = conf[j, k] + 1NEWLINEplot_confusion_matrix(conf, labels=classes, normalize=False, filedest='/Users/debashri/Desktop/DirectionFinding_Plots/Indoor/15ft/direction_12_poses_conf_mat_2D_CNN_Mapping.png')NEWLINE |
from tkinter import *NEWLINEfrom new_main import semaphore_algoNEWLINENEWLINEwindow = Tk()NEWLINEwindow.title("Timetable Generation OS Project")
NEWLINENEWLINENEWLINEclass ProvideException(object):NEWLINE    def __init__(self, func):NEWLINE        self._func = funcNEWLINENEWLINE    def __call__(self, *args):NEWLINE        try:NEWLINE            return self._func(*args)NEWLINE        except ValueError:NEWLINE            print("Please enter numerical values only")NEWLINE        except KeyboardInterrupt:NEWLINE            print("You hit an interrupt key like Ctrl+C. Please rerun the code.")
NEWLINENEWLINE@ProvideExceptionNEWLINEdef set_values():NEWLINE    list_1 = [label3_1.get(), label3_2.get(), label3_3.get(), label3_4.get()] # Batch 1NEWLINE    list_2 = [label4_1.get(), label4_2.get(), label4_3.get(), label4_4.get()] # Batch 2NEWLINE    list_3 = [label5_1.get(), label5_2.get(), label5_3.get(), label5_4.get()] # Batch 3NEWLINE    list_4 = [label6_1.get(), label6_2.get(), label6_3.get(), label6_4.get()] # Batch 4NEWLINE    final_list = [list_1, list_2, list_3, list_4]NEWLINE    print(list_1)NEWLINE    print(list_2)NEWLINE    print(list_3)NEWLINE    print(list_4)NEWLINE    print(final_list)
NEWLINENEWLINE    fac_list_1 = [] # For each faculty: one batch index per required lecture hourNEWLINE    fac_list_2 = []NEWLINE    fac_list_3 = []NEWLINE    fac_list_4 = []
NEWLINENEWLINE    for batch_no in range(0, 4): # each row of final_list is a batchNEWLINE        x = int(final_list[batch_no][0])NEWLINE        for hour_cnt in range(0, x):NEWLINE            fac_list_1.append(batch_no)
NEWLINENEWLINE        x1 = int(final_list[batch_no][1])NEWLINE        for hour_cnt in range(0, x1):NEWLINE            fac_list_2.append(batch_no)
NEWLINENEWLINE        x2 = int(final_list[batch_no][2])NEWLINE        for hour_cnt in range(0, x2):NEWLINE            fac_list_3.append(batch_no)
NEWLINENEWLINE        x3 = int(final_list[batch_no][3])NEWLINE        for hour_cnt in range(0, x3):NEWLINE            fac_list_4.append(batch_no)
NEWLINENEWLINE    print(fac_list_1)NEWLINE    print(fac_list_2)NEWLINE    print(fac_list_3)NEWLINE    print(fac_list_4)
NEWLINENEWLINE    semaphore_algo(fac_list_1, fac_list_2, fac_list_3, fac_list_4)
NEWLINENEWLINENEWLINEtext1 = Label(window, text="Enter the faculty hours required for each branch")NEWLINEtext1.grid(row=0)
NEWLINENEWLINEtext2 = Label(window, text="Branch Name")NEWLINEtext2_1 = Label(window, text="Faculty 1")NEWLINEtext2_2 = Label(window, text="Faculty 2")NEWLINEtext2_3 = Label(window, text="Faculty 3")NEWLINEtext2_4 = Label(window, text="Faculty 4")NEWLINEtext2.grid(row=1, column=0)NEWLINEtext2_1.grid(row=1, column=1)NEWLINEtext2_2.grid(row=1, column=2)NEWLINEtext2_3.grid(row=1, column=3)NEWLINEtext2_4.grid(row=1, column=4)
NEWLINENEWLINEtext3 = Label(window, text="B.Tech CS")NEWLINElabel3_1 = Entry(window)NEWLINElabel3_2 = Entry(window)NEWLINElabel3_3 = Entry(window)NEWLINElabel3_4 = Entry(window)NEWLINEtext3.grid(row=2, column=0)NEWLINElabel3_1.grid(row=2, column=1)NEWLINElabel3_2.grid(row=2, column=2)NEWLINElabel3_3.grid(row=2, column=3)NEWLINElabel3_4.grid(row=2, column=4)
NEWLINENEWLINEtext4 = Label(window, text="B.Tech IT")NEWLINElabel4_1 = Entry(window)NEWLINElabel4_2 = Entry(window)NEWLINElabel4_3 = Entry(window)NEWLINElabel4_4 = Entry(window)NEWLINEtext4.grid(row=3, column=0)NEWLINElabel4_1.grid(row=3, column=1)NEWLINElabel4_2.grid(row=3, column=2)NEWLINElabel4_3.grid(row=3, column=3)NEWLINElabel4_4.grid(row=3, column=4)
NEWLINENEWLINEtext5 = Label(window, text="MBA.Tech CS")NEWLINElabel5_1 = Entry(window)NEWLINElabel5_2 = Entry(window)NEWLINElabel5_3 = Entry(window)NEWLINElabel5_4 = Entry(window)NEWLINEtext5.grid(row=4, column=0)NEWLINElabel5_1.grid(row=4, column=1)NEWLINElabel5_2.grid(row=4, column=2)NEWLINElabel5_3.grid(row=4,
column=3)NEWLINElabel5_4.grid(row=4, column=4)NEWLINENEWLINEtext6 = Label(window, text="MBA.Tech IT")NEWLINElabel6_1 = Entry(window)NEWLINElabel6_2 = Entry(window)NEWLINElabel6_3 = Entry(window)NEWLINElabel6_4 = Entry(window)NEWLINEtext6.grid(row=5, column=0)NEWLINElabel6_1.grid(row=5, column=1)NEWLINElabel6_2.grid(row=5, column=2)NEWLINElabel6_3.grid(row=5, column=3)NEWLINElabel6_4.grid(row=5, column=4)NEWLINENEWLINEbutton1 = Button(window, text="Submit Request", command=set_values)NEWLINEbutton1.grid(row=6, column=2)NEWLINENEWLINEwindow.mainloop()NEWLINE |
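# [Editor's note] set_values() expands the per-batch hour counts into per-hour slot lists:NEWLINE# fac_list_N collects, for Faculty N, one batch index per requested lecture hour. ForNEWLINE# example, if B.Tech CS (batch 0) asks for 2 hours of Faculty 1 and MBA.Tech CS (batch 2)NEWLINE# asks for 1 hour, then fac_list_1 == [0, 0, 2] -- the per-hour slot list that isNEWLINE# handed to semaphore_algo for scheduling.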
from unittest import mockNEWLINENEWLINEimport pytestNEWLINENEWLINEfrom bgmi.lib.models import Bangumi, Filter, FollowedNEWLINEfrom bgmi.main import mainNEWLINEfrom bgmi.website.bangumi_moe import BangumiMoe
NEWLINENEWLINENEWLINEdef test_gen_nginx_conf():NEWLINE    main("gen nginx.conf --server-name _".split())
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_cal_force_update():NEWLINE    class MockWebsite(BangumiMoe):NEWLINE        def fetch_bangumi_calendar(self):NEWLINE            bangumi = BangumiMoe().fetch_bangumi_calendar()NEWLINE            bangumi[0].update_time = "Unknown"NEWLINE            return bangumiNEWLINENEWLINE    with mock.patch("bgmi.lib.controllers.website", MockWebsite()):NEWLINE        main("cal -f".split())NEWLINE        assert [NEWLINE            x.name for x in Bangumi.select().where(Bangumi.update_time == "Unknown")NEWLINE        ], "at least 1 bangumi's update_time is 'Unknown'"
NEWLINENEWLINENEWLINEdef test_cal_config():NEWLINE    main("config".split())NEWLINE    main("config ADMIN_TOKEN 233".split())NEWLINE    main("config DOWNLOAD_DELEGATE xunlei".split())NEWLINE    main("config BANGUMI_MOE_URL https://bangumi.moe".split())
NEWLINENEWLINENEWLINEdef test_add(bangumi_names):NEWLINE    main(["add", *bangumi_names])
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_update(bangumi_names):NEWLINE    main(["add", *bangumi_names])NEWLINE    main(["update"])
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_update_single(bangumi_names):NEWLINE    name = bangumi_names[0]NEWLINE    main(f"add {name}".split())NEWLINE    main(["update", name])
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_search(bangumi_names):NEWLINE    main(["search", "海贼王", "--regex-filter", ".*MP4.*720P.*"])
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_delete(bangumi_names):NEWLINE    name = bangumi_names[0]NEWLINE    main(f"add {name} --episode 0".split())NEWLINE    main(f"delete --name {name}".split())
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_delete_batch(bangumi_names):NEWLINE    main(["add", *bangumi_names, "--episode", "0"])NEWLINE    main("delete --clear-all --batch".split())
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_filter(bangumi_names):NEWLINE    name = bangumi_names[0]NEWLINE    main(f"add {name} --episode 0".split())NEWLINE    main(["filter", name, "--subtitle", "", "--exclude", "MKV", "--regex", "720p|720P"])NEWLINE    f = Filter.get(bangumi_name=name, exclude="MKV", regex="720p|720P")NEWLINE    assert not f.includeNEWLINE    assert not f.subtitle
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_fetch(bangumi_names):NEWLINE    name = bangumi_names[0]NEWLINE    main(f"add {name} --episode 0".split())NEWLINE    main(f"fetch {name}".split())
NEWLINENEWLINENEWLINE@pytest.mark.usefixtures("_clean_bgmi")NEWLINEdef test_mark(bangumi_names):NEWLINE    name = bangumi_names[0]NEWLINE    main(f"add {name} --episode 0".split())NEWLINE    main(f"mark {name} 1".split())NEWLINE    assert Followed.get(bangumi_name=name).episode == 1NEWLINE
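NEWLINENEWLINE# [Editor's note] These tests rely on two fixtures defined in the project's conftest.py,NEWLINE# not in this file: bangumi_names (titles to operate on) and _clean_bgmi (database cleanupNEWLINE# between tests). A minimal sketch of what such a conftest could look like -- the bodiesNEWLINE# below are illustrative assumptions, not the project's actual fixtures:NEWLINE#   import pytestNEWLINE#   from bgmi.lib.models import Filter, FollowedNEWLINE#NEWLINE#   @pytest.fixtureNEWLINE#   def bangumi_names():NEWLINE#       return ["海贼王"]NEWLINE#NEWLINE#   @pytest.fixtureNEWLINE#   def _clean_bgmi():NEWLINE#       yield  # run the testNEWLINE#       Followed.delete().execute()  # peewee-style table cleanup (assumed)NEWLINE#       Filter.delete().execute()NEWLINE |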
"""DSM 7 SYNO.Core.* datas."""NEWLINE |
"""NEWLINEMIT LicenseNEWLINENEWLINECopyright (c) 2020-present phenom4n4nNEWLINENEWLINEPermission is hereby granted, free of charge, to any person obtaining a copyNEWLINEof this software and associated documentation files (the "Software"), to dealNEWLINEin the Software without restriction, including without limitation the rightsNEWLINEto use, copy, modify, merge, publish, distribute, sublicense, and/or sellNEWLINEcopies of the Software, and to permit persons to whom the Software isNEWLINEfurnished to do so, subject to the following conditions:NEWLINENEWLINEThe above copyright notice and this permission notice shall be included in allNEWLINEcopies or substantial portions of the Software.NEWLINENEWLINETHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORNEWLINEIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,NEWLINEFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THENEWLINEAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERNEWLINELIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,NEWLINEOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THENEWLINESOFTWARE.NEWLINE"""NEWLINENEWLINEimport loggingNEWLINEfrom collections import defaultdictNEWLINEfrom colorsys import rgb_to_hsvNEWLINEfrom typing import List, OptionalNEWLINENEWLINEimport discordNEWLINEfrom redbot.core import commandsNEWLINEfrom redbot.core.utils.chat_formatting import humanize_number as hnNEWLINEfrom redbot.core.utils.chat_formatting import pagify, text_to_fileNEWLINEfrom redbot.core.utils.mod import get_audit_reasonNEWLINEfrom TagScriptEngine import Interpreter, LooseVariableGetterBlock, MemberAdapterNEWLINENEWLINEfrom .abc import MixinMetaNEWLINEfrom .converters import FuzzyRole, StrictRole, TargeterArgs, TouchableMemberNEWLINEfrom .utils import (NEWLINE can_run_command,NEWLINE guild_roughly_chunked,NEWLINE humanize_roles,NEWLINE is_allowed_by_role_hierarchy,NEWLINE)NEWLINENEWLINElog = logging.getLogger("red.phenom4n4n.roleutils")NEWLINENEWLINENEWLINEdef targeter_cog(ctx: commands.Context):NEWLINE cog = ctx.bot.get_cog("Targeter")NEWLINE return cog is not None and hasattr(cog, "args_to_list")NEWLINENEWLINENEWLINEdef chunks(l, n):NEWLINE """NEWLINE Yield successive n-sized chunks from l.NEWLINE https://github.com/flaree/flare-cogs/blob/08b78e33ab814aa4da5422d81a5037ae3df51d4e/commandstats/commandstats.py#L16NEWLINE """NEWLINE for i in range(0, len(l), n):NEWLINE yield l[i : i + n]NEWLINENEWLINENEWLINEclass Roles(MixinMeta):NEWLINE """NEWLINE Useful role commands.NEWLINE """NEWLINENEWLINE def __init__(self):NEWLINE self.interpreter = Interpreter([LooseVariableGetterBlock()])NEWLINE super().__init__()NEWLINENEWLINE async def initialize(self):NEWLINE log.debug("Roles Initialize")NEWLINE await super().initialize()NEWLINENEWLINE @commands.guild_only()NEWLINE @commands.group(invoke_without_command=True)NEWLINE async def role(NEWLINE self, ctx: commands.Context, member: TouchableMember(False), *, role: StrictRole(False)NEWLINE ):NEWLINE """Base command for modifying roles.NEWLINENEWLINE Invoking this command will add or remove the given role from the member, depending on whether they already had it."""NEWLINE if role in member.roles and await can_run_command(ctx, "role remove"):NEWLINE com = self.bot.get_command("role remove")NEWLINE await ctx.invoke(NEWLINE com,NEWLINE member=member,NEWLINE role=role,NEWLINE )NEWLINE elif role not in member.roles and await can_run_command(ctx, "role add"):NEWLINE com = 
self.bot.get_command("role add")NEWLINE await ctx.invoke(NEWLINE com,NEWLINE member=member,NEWLINE role=role,NEWLINE )NEWLINE else:NEWLINE await ctx.send_help()NEWLINENEWLINE @commands.bot_has_permissions(embed_links=True)NEWLINE @role.command("info")NEWLINE async def role_info(self, ctx: commands.Context, *, role: FuzzyRole):NEWLINE """Get information about a role."""NEWLINE await ctx.send(embed=await self.get_info(role))NEWLINENEWLINE async def get_info(self, role: discord.Role) -> discord.Embed:NEWLINE if guild_roughly_chunked(role.guild) is False and self.bot.intents.members:NEWLINE await role.guild.chunk()NEWLINE description = [NEWLINE f"{role.mention}",NEWLINE f"Members: {len(role.members)} | Position: {role.position}",NEWLINE f"Color: {role.color}",NEWLINE f"Hoisted: {role.hoist}",NEWLINE f"Mentionable: {role.mentionable}",NEWLINE ]NEWLINE if role.managed:NEWLINE description.append(f"Managed: {role.managed}")NEWLINE if role in await self.bot.get_mod_roles(role.guild):NEWLINE description.append(f"Mod Role: True")NEWLINE if role in await self.bot.get_admin_roles(role.guild):NEWLINE description.append(f"Admin Role: True")NEWLINE e = discord.Embed(NEWLINE color=role.color,NEWLINE title=role.name,NEWLINE description="\n".join(description),NEWLINE timestamp=role.created_at,NEWLINE )NEWLINE e.set_footer(text=role.id)NEWLINE return eNEWLINENEWLINE def format_member(self, member: discord.Member, formatting: str) -> str:NEWLINE output = self.interpreter.process(formatting, {"member": MemberAdapter(member)})NEWLINE return output.bodyNEWLINENEWLINE @commands.bot_has_permissions(attach_files=True)NEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @role.command("members", aliases=["dump"])NEWLINE async def role_members(NEWLINE self,NEWLINE ctx: commands.Context,NEWLINE role: FuzzyRole,NEWLINE *,NEWLINE formatting: str = "{member} - {member(id)}",NEWLINE ):NEWLINE """NEWLINE Sends a list of members in a role.NEWLINENEWLINE You can supply a custom formatting tagscript for each member.NEWLINE The [member](https://phen-cogs.readthedocs.io/en/latest/tags/default_variables.html#author-block) block is available to use, found on the [TagScript documentation](https://phen-cogs.readthedocs.io/en/latest/index.html).NEWLINENEWLINE **Example:**NEWLINE `[p]role dump @admin <t:{member(timestamp)}> - {member(mention)}`NEWLINE """NEWLINE if guild_roughly_chunked(ctx.guild) is False and self.bot.intents.members:NEWLINE await ctx.guild.chunk()NEWLINE if not role.members:NEWLINE return await ctx.send(f"**{role}** has no members.")NEWLINE members = "\n".join(self.format_member(member, formatting) for member in role.members)NEWLINE if len(members) > 2000:NEWLINE await ctx.send(file=text_to_file(members, f"members.txt"))NEWLINE else:NEWLINE await ctx.send(members, allowed_mentions=discord.AllowedMentions.none())NEWLINENEWLINE @staticmethodNEWLINE def get_hsv(role: discord.Role):NEWLINE return rgb_to_hsv(*role.color.to_rgb())NEWLINENEWLINE @commands.bot_has_permissions(embed_links=True)NEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @role.command("colors")NEWLINE async def role_colors(self, ctx: commands.Context):NEWLINE """Sends the server's roles, ordered by color."""NEWLINE roles = defaultdict(list)NEWLINE for r in ctx.guild.roles:NEWLINE roles[str(r.color)].append(r)NEWLINE roles = dict(sorted(roles.items(), key=lambda v: self.get_hsv(v[1][0])))NEWLINENEWLINE lines = [f"**{color}**\n{' '.join(r.mention for r in rs)}" for color, rs in roles.items()]NEWLINE for page in 
pagify("\n".join(lines)):NEWLINE e = discord.Embed(description=page)NEWLINE await ctx.send(embed=e)NEWLINENEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @role.command("create")NEWLINE async def role_create(NEWLINE self,NEWLINE ctx: commands.Context,NEWLINE color: Optional[discord.Color] = discord.Color.default(),NEWLINE hoist: Optional[bool] = False,NEWLINE *,NEWLINE name: str = None,NEWLINE ):NEWLINE """NEWLINE Creates a role.NEWLINENEWLINE Color and whether it is hoisted can be specified.NEWLINE """NEWLINE if len(ctx.guild.roles) >= 250:NEWLINE return await ctx.send("This server has reached the maximum role limit (250).")NEWLINENEWLINE role = await ctx.guild.create_role(name=name, colour=color, hoist=hoist)NEWLINE await ctx.send(f"**{role}** created!", embed=await self.get_info(role))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command("color", aliases=["colour"])NEWLINE async def role_color(NEWLINE self, ctx: commands.Context, role: StrictRole(check_integrated=False), color: discord.ColorNEWLINE ):NEWLINE """Change a role's color."""NEWLINE await role.edit(color=color)NEWLINE await ctx.send(NEWLINE f"**{role}** color changed to **{color}**.", embed=await self.get_info(role)NEWLINE )NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command("hoist")NEWLINE async def role_hoist(NEWLINE self,NEWLINE ctx: commands.Context,NEWLINE role: StrictRole(check_integrated=False),NEWLINE hoisted: bool = None,NEWLINE ):NEWLINE """Toggle whether a role should appear seperate from other roles."""NEWLINE hoisted = hoisted if hoisted is not None else not role.hoistNEWLINE await role.edit(hoist=hoisted)NEWLINE now = "now" if hoisted else "no longer"NEWLINE await ctx.send(f"**{role}** is {now} hoisted.", embed=await self.get_info(role))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command("name")NEWLINE async def role_name(NEWLINE self, ctx: commands.Context, role: StrictRole(check_integrated=False), *, name: strNEWLINE ):NEWLINE """Change a role's name."""NEWLINE old_name = role.nameNEWLINE await role.edit(name=name)NEWLINE await ctx.send(f"Changed **{old_name}** to **{name}**.", embed=await self.get_info(role))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command("add")NEWLINE async def role_add(self, ctx: commands.Context, member: TouchableMember, *, role: StrictRole):NEWLINE """Add a role to a member."""NEWLINE if role in member.roles:NEWLINE await ctx.send(NEWLINE f"**{member}** already has the role **{role}**. Maybe try removing it instead."NEWLINE )NEWLINE returnNEWLINE reason = get_audit_reason(ctx.author)NEWLINE await member.add_roles(role, reason=reason)NEWLINE await ctx.send(f"Added **{role.name}** to **{member}**.")NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command("remove")NEWLINE async def role_remove(NEWLINE self, ctx: commands.Context, member: TouchableMember, *, role: StrictRoleNEWLINE ):NEWLINE """Remove a role from a member."""NEWLINE if role not in member.roles:NEWLINE await ctx.send(NEWLINE f"**{member}** doesn't have the role **{role}**. 
Maybe try adding it instead."NEWLINE )NEWLINE returnNEWLINE reason = get_audit_reason(ctx.author)NEWLINE await member.remove_roles(role, reason=reason)NEWLINE await ctx.send(f"Removed **{role.name}** from **{member}**.")NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command(require_var_positional=True)NEWLINE async def addmulti(self, ctx: commands.Context, role: StrictRole, *members: TouchableMember):NEWLINE """Add a role to multiple members."""NEWLINE reason = get_audit_reason(ctx.author)NEWLINE already_members = []NEWLINE success_members = []NEWLINE for member in members:NEWLINE if role not in member.roles:NEWLINE await member.add_roles(role, reason=reason)NEWLINE success_members.append(member)NEWLINE else:NEWLINE already_members.append(member)NEWLINE msg = []NEWLINE if success_members:NEWLINE msg.append(f"Added **{role}** to {humanize_roles(success_members)}.")NEWLINE if already_members:NEWLINE msg.append(f"{humanize_roles(already_members)} already had **{role}**.")NEWLINE await ctx.send("\n".join(msg))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command(require_var_positional=True)NEWLINE async def removemulti(NEWLINE self, ctx: commands.Context, role: StrictRole, *members: TouchableMemberNEWLINE ):NEWLINE """Remove a role from multiple members."""NEWLINE reason = get_audit_reason(ctx.author)NEWLINE already_members = []NEWLINE success_members = []NEWLINE for member in members:NEWLINE if role in member.roles:NEWLINE await member.remove_roles(role, reason=reason)NEWLINE success_members.append(member)NEWLINE else:NEWLINE already_members.append(member)NEWLINE msg = []NEWLINE if success_members:NEWLINE msg.append(f"Removed **{role}** from {humanize_roles(success_members)}.")NEWLINE if already_members:NEWLINE msg.append(f"{humanize_roles(already_members)} didn't have **{role}**.")NEWLINE await ctx.send("\n".join(msg))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @commands.group(invoke_without_command=True, require_var_positional=True)NEWLINE async def multirole(self, ctx: commands.Context, member: TouchableMember, *roles: StrictRole):NEWLINE """Add multiple roles to a member."""NEWLINE not_allowed = []NEWLINE already_added = []NEWLINE to_add = []NEWLINE for role in roles:NEWLINE allowed = await is_allowed_by_role_hierarchy(self.bot, ctx.me, ctx.author, role)NEWLINE if not allowed[0]:NEWLINE not_allowed.append(role)NEWLINE elif role in member.roles:NEWLINE already_added.append(role)NEWLINE else:NEWLINE to_add.append(role)NEWLINE reason = get_audit_reason(ctx.author)NEWLINE msg = []NEWLINE if to_add:NEWLINE await member.add_roles(*to_add, reason=reason)NEWLINE msg.append(f"Added {humanize_roles(to_add)} to **{member}**.")NEWLINE if already_added:NEWLINE msg.append(f"**{member}** already had {humanize_roles(already_added)}.")NEWLINE if not_allowed:NEWLINE msg.append(NEWLINE f"You do not have permission to assign the roles {humanize_roles(not_allowed)}."NEWLINE )NEWLINE await ctx.send("\n".join(msg))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @multirole.command("remove", require_var_positional=True)NEWLINE async def multirole_remove(NEWLINE self, ctx: commands.Context, member: TouchableMember, *roles: StrictRoleNEWLINE ):NEWLINE """Remove multiple roles 
from a member."""NEWLINE not_allowed = []NEWLINE not_added = []NEWLINE to_rm = []NEWLINE for role in roles:NEWLINE allowed = await is_allowed_by_role_hierarchy(self.bot, ctx.me, ctx.author, role)NEWLINE if not allowed[0]:NEWLINE not_allowed.append(role)NEWLINE elif role not in member.roles:NEWLINE not_added.append(role)NEWLINE else:NEWLINE to_rm.append(role)NEWLINE reason = get_audit_reason(ctx.author)NEWLINE msg = []NEWLINE if to_rm:NEWLINE await member.remove_roles(*to_rm, reason=reason)NEWLINE msg.append(f"Removed {humanize_roles(to_rm)} from **{member}**.")NEWLINE if not_added:NEWLINE msg.append(f"**{member}** didn't have {humanize_roles(not_added)}.")NEWLINE if not_allowed:NEWLINE msg.append(NEWLINE f"You do not have permission to assign the roles {humanize_roles(not_allowed)}."NEWLINE )NEWLINE await ctx.send("\n".join(msg))NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command()NEWLINE async def all(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Add a role to all members of the server."""NEWLINE await self.super_massrole(ctx, ctx.guild.members, role)NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command(aliases=["removeall"])NEWLINE async def rall(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Remove a role from all members of the server."""NEWLINE member_list = self.get_member_list(ctx.guild.members, role, False)NEWLINE await self.super_massrole(NEWLINE ctx, member_list, role, "No one on the server has this role.", FalseNEWLINE )NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command()NEWLINE async def humans(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Add a role to all humans (non-bots) in the server."""NEWLINE await self.super_massrole(NEWLINE ctx,NEWLINE [member for member in ctx.guild.members if not member.bot],NEWLINE role,NEWLINE "Every human in the server has this role.",NEWLINE )NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command()NEWLINE async def rhumans(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Remove a role from all humans (non-bots) in the server."""NEWLINE await self.super_massrole(NEWLINE ctx,NEWLINE [member for member in ctx.guild.members if not member.bot],NEWLINE role,NEWLINE "None of the humans in the server have this role.",NEWLINE False,NEWLINE )NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command()NEWLINE async def bots(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Add a role to all bots in the server."""NEWLINE await self.super_massrole(NEWLINE ctx,NEWLINE [member for member in ctx.guild.members if member.bot],NEWLINE role,NEWLINE "Every bot in the server has this role.",NEWLINE )NEWLINENEWLINE @commands.has_guild_permissions(manage_roles=True)NEWLINE @commands.bot_has_permissions(manage_roles=True)NEWLINE @role.command()NEWLINE async def rbots(self, ctx: commands.Context, *, role: StrictRole):NEWLINE """Remove a role from all bots in the server."""NEWLINE await self.super_massrole(NEWLINE ctx,NEWLINE [member for member in ctx.guild.members if member.bot],NEWLINE role,NEWLINE "None of the bots in the server have this role.",NEWLINE False,NEWLINE 
)NEWLINENEWLINE    @commands.has_guild_permissions(manage_roles=True)NEWLINE    @commands.bot_has_permissions(manage_roles=True)NEWLINE    @role.command("in")NEWLINE    async def role_in(NEWLINE        self, ctx: commands.Context, target_role: FuzzyRole, *, add_role: StrictRoleNEWLINE    ):NEWLINE        """Add a role to all members of another role."""NEWLINE        await self.super_massrole(NEWLINE            ctx,NEWLINE            [member for member in target_role.members],NEWLINE            add_role,NEWLINE            f"Every member of **{target_role}** has this role.",NEWLINE        )
NEWLINENEWLINE    @commands.has_guild_permissions(manage_roles=True)NEWLINE    @commands.bot_has_permissions(manage_roles=True)NEWLINE    @role.command("rin")NEWLINE    async def role_rin(NEWLINE        self, ctx: commands.Context, target_role: FuzzyRole, *, remove_role: StrictRoleNEWLINE    ):NEWLINE        """Remove a role from all members of another role."""NEWLINE        await self.super_massrole(NEWLINE            ctx,NEWLINE            [member for member in target_role.members],NEWLINE            remove_role,NEWLINE            f"No one in **{target_role}** has this role.",NEWLINE            False,NEWLINE        )
NEWLINENEWLINE    @commands.check(targeter_cog)NEWLINE    @commands.has_guild_permissions(manage_roles=True)NEWLINE    @commands.bot_has_permissions(manage_roles=True)NEWLINE    @role.group()NEWLINE    async def target(self, ctx: commands.Context):NEWLINE        """NEWLINE        Modify roles using 'targeting' args.NEWLINENEWLINE        An explanation of Targeter and test commands to preview the members affected can be found with `[p]target`.NEWLINE        """
NEWLINENEWLINE    @target.command("add")NEWLINE    async def target_add(self, ctx: commands.Context, role: StrictRole, *, args: TargeterArgs):NEWLINE        """NEWLINE        Add a role to members using targeting args.NEWLINENEWLINE        An explanation of Targeter and test commands to preview the members affected can be found with `[p]target`.NEWLINE        """NEWLINE        await self.super_massrole(NEWLINE            ctx,NEWLINE            args,NEWLINE            role,NEWLINE            f"No one was found with the given args that was eligible to receive **{role}**.",NEWLINE        )
NEWLINENEWLINE    @target.command("remove")NEWLINE    async def target_remove(self, ctx: commands.Context, role: StrictRole, *, args: TargeterArgs):NEWLINE        """NEWLINE        Remove a role from members using targeting args.NEWLINENEWLINE        An explanation of Targeter and test commands to preview the members affected can be found with `[p]target`.NEWLINE        """NEWLINE        await self.super_massrole(NEWLINE            ctx,NEWLINE            args,NEWLINE            role,NEWLINE            f"No one was found with the given args that was eligible to have **{role}** removed from them.",NEWLINE            False,NEWLINE        )
NEWLINENEWLINE    async def super_massrole(NEWLINE        self,NEWLINE        ctx: commands.Context,NEWLINE        members: list,NEWLINE        role: discord.Role,NEWLINE        fail_message: str = "Everyone in the server has this role.",NEWLINE        adding: bool = True,NEWLINE    ):NEWLINE        if guild_roughly_chunked(ctx.guild) is False and self.bot.intents.members:NEWLINE            await ctx.guild.chunk()NEWLINE        member_list = self.get_member_list(members, role, adding)NEWLINE        if not member_list:NEWLINE            await ctx.send(fail_message)NEWLINE            returnNEWLINE        verb = "add" if adding else "remove"NEWLINE        word = "to" if adding else "from"NEWLINE        await ctx.send(NEWLINE            f"Beginning to {verb} **{role.name}** {word} **{len(member_list)}** members."NEWLINE        )NEWLINE        async with ctx.typing():NEWLINE            result = await self.massrole(member_list, [role], get_audit_reason(ctx.author), adding)NEWLINE            result_text = f"{verb.title()[:5]}ed **{role.name}** {word} **{len(result['completed'])}** members."NEWLINE            if result["skipped"]:NEWLINE                result_text += (NEWLINE                    f"\nSkipped {verb[:5]}ing roles for 
**{len(result['skipped'])}** members."NEWLINE )NEWLINE if result["failed"]:NEWLINE result_text += (NEWLINE f"\nFailed {verb[:5]}ing roles for **{len(result['failed'])}** members."NEWLINE )NEWLINE await ctx.send(result_text)NEWLINENEWLINE def get_member_list(self, members: list, role: discord.Role, adding: bool = True):NEWLINE if adding:NEWLINE members = [member for member in members if role not in member.roles]NEWLINE else:NEWLINE members = [member for member in members if role in member.roles]NEWLINE return membersNEWLINENEWLINE async def massrole(self, members: list, roles: list, reason: str, adding: bool = True):NEWLINE completed = []NEWLINE skipped = []NEWLINE failed = []NEWLINE for member in members:NEWLINE if adding:NEWLINE to_add = [role for role in roles if role not in member.roles]NEWLINE if to_add:NEWLINE try:NEWLINE await member.add_roles(*to_add, reason=reason)NEWLINE except Exception as e:NEWLINE failed.append(member)NEWLINE log.exception(f"Failed to add roles to {member}", exc_info=e)NEWLINE else:NEWLINE completed.append(member)NEWLINE else:NEWLINE skipped.append(member)NEWLINE else:NEWLINE to_remove = [role for role in roles if role in member.roles]NEWLINE if to_remove:NEWLINE try:NEWLINE await member.remove_roles(*to_remove, reason=reason)NEWLINE except Exception as e:NEWLINE failed.append(member)NEWLINE log.exception(f"Failed to remove roles from {member}", exc_info=e)NEWLINE else:NEWLINE completed.append(member)NEWLINE else:NEWLINE skipped.append(member)NEWLINE return {"completed": completed, "skipped": skipped, "failed": failed}NEWLINENEWLINE @staticmethodNEWLINE def format_members(members: List[discord.Member]):NEWLINE length = len(members)NEWLINE s = "" if length == 1 else "s"NEWLINE return f"**{hn(length)}** member{s}"NEWLINENEWLINE @role.command("uniquemembers", aliases=["um"], require_var_positional=True)NEWLINE async def role_uniquemembers(self, ctx: commands.Context, *roles: FuzzyRole):NEWLINE """NEWLINE View the total unique members between multiple roles.NEWLINE """NEWLINE roles_length = len(roles)NEWLINE if roles_length == 1:NEWLINE raise commands.UserFeedbackCheckFailure("You must provide at least 2 roles.")NEWLINE if not ctx.guild.chunked:NEWLINE await ctx.guild.chunk()NEWLINE color = roles[0].colorNEWLINE unique_members = set()NEWLINE description = []NEWLINE for role in roles:NEWLINE unique_members.update(role.members)NEWLINE description.append(f"{role.mention}: {self.format_members(role.members)}")NEWLINE description.insert(0, f"**Unique members**: {self.format_members(unique_members)}")NEWLINE e = discord.Embed(NEWLINE color=color,NEWLINE title=f"Unique members between {roles_length} roles",NEWLINE description="\n".join(description),NEWLINE )NEWLINE ref = ctx.message.to_reference(fail_if_not_exists=False)NEWLINE await ctx.send(embed=e, reference=ref)NEWLINE |
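# A minimal, dependency-free sketch of the completed/skipped/failed bookkeeping used byNEWLINE# massrole() above, so the partition logic can be exercised without a live discord.py guild.NEWLINE# FakeMember and the sample data are hypothetical stand-ins, not part of the cog.NEWLINEclass FakeMember:NEWLINE def __init__(self, name, roles):NEWLINE self.name = nameNEWLINE self.roles = set(roles)NEWLINENEWLINENEWLINEdef massrole_sketch(members, roles, adding=True):NEWLINE completed, skipped = [], []NEWLINE for member in members:NEWLINE if adding:NEWLINE todo = [role for role in roles if role not in member.roles]NEWLINE else:NEWLINE todo = [role for role in roles if role in member.roles]NEWLINE if not todo:NEWLINE skipped.append(member)NEWLINE continueNEWLINE if adding:NEWLINE member.roles.update(todo)NEWLINE else:NEWLINE member.roles.difference_update(todo)NEWLINE completed.append(member)NEWLINE # a real run would also populate "failed" when the API call raisesNEWLINE return {"completed": completed, "skipped": skipped, "failed": []}NEWLINENEWLINENEWLINEalice = FakeMember("alice", {"mod"})NEWLINEbob = FakeMember("bob", set())NEWLINEresult = massrole_sketch([alice, bob], ["mod"])NEWLINEassert [m.name for m in result["completed"]] == ["bob"]NEWLINEassert [m.name for m in result["skipped"]] == ["alice"]NEWLINE |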
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEimport argparseNEWLINEimport osNEWLINEimport randomNEWLINEimport timeNEWLINEimport mathNEWLINEimport sysNEWLINEfrom functools import partialNEWLINENEWLINEimport numpy as npNEWLINEimport paddleNEWLINENEWLINEimport paddlenlp as ppnlpNEWLINEfrom paddlenlp.transformers import LinearDecayWithWarmupNEWLINEfrom paddlenlp.metrics import ChunkEvaluatorNEWLINEfrom paddlenlp.datasets import load_datasetNEWLINEfrom paddlenlp.data import Stack, Tuple, PadNEWLINEfrom paddlenlp.utils.log import loggerNEWLINENEWLINE# from paddlenlp.trainer.trainer_base import TrainerBaseNEWLINEsys.path.insert(0, os.path.abspath("."))NEWLINEfrom utils import DictNEWLINENEWLINENEWLINEdef tokenize_and_align_labels(example, tokenizer, no_entity_id,NEWLINE max_seq_len=512):NEWLINE labels = example['labels']NEWLINE example = example['tokens']NEWLINE tokenized_input = tokenizer(NEWLINE example,NEWLINE is_split_into_words=True,NEWLINE max_seq_len=max_seq_len, )NEWLINENEWLINE # -2 for [CLS] and [SEP]NEWLINE if len(tokenized_input['input_ids']) - 2 < len(labels):NEWLINE labels = labels[:len(tokenized_input['input_ids']) - 2]NEWLINE tokenized_input['labels'] = [no_entity_id] + labels + [no_entity_id]NEWLINE tokenized_input['labels'] += [no_entity_id] * (NEWLINE len(tokenized_input['input_ids']) - len(tokenized_input['labels']))NEWLINENEWLINE return tokenized_inputNEWLINENEWLINENEWLINEdef ner_collator(tokenizer, args):NEWLINE batchify_fn = lambda samples, fn=Dict({NEWLINE 'input_ids': Pad(axis=0, pad_val=tokenizer.pad_token_id, dtype='int32'), # inputNEWLINE 'token_type_ids': Pad(axis=0, pad_val=tokenizer.pad_token_type_id, dtype='int32'), # segmentNEWLINE 'labels': Pad(axis=0, pad_val=args.ignore_label, dtype='int64') # labelNEWLINE }): fn(samples)NEWLINENEWLINE return batchify_fnNEWLINENEWLINENEWLINEdef ner_trans_fn(example, tokenizer, args):NEWLINE return tokenize_and_align_labels(NEWLINE example,NEWLINE tokenizer=tokenizer,NEWLINE no_entity_id=args.no_entity_id,NEWLINE max_seq_len=args.max_seq_length)NEWLINE |
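# A self-contained sketch of the label alignment performed by tokenize_and_align_labels() above.NEWLINE# StubTokenizer is a hypothetical stand-in that mimics only the fields the function readsNEWLINE# (a real PaddleNLP tokenizer also returns token_type_ids); it is not part of the training script.NEWLINEclass StubTokenizer:NEWLINE def __call__(self, tokens, is_split_into_words=True, max_seq_len=512):NEWLINE # [CLS] + one id per word + [SEP], truncated to max_seq_lenNEWLINE ids = [101] + list(range(1, len(tokens) + 1))[:max_seq_len - 2] + [102]NEWLINE return {'input_ids': ids}NEWLINENEWLINENEWLINEdef align_labels(example, tokenizer, no_entity_id, max_seq_len=512):NEWLINE labels = example['labels']NEWLINE tokenized = tokenizer(example['tokens'], is_split_into_words=True, max_seq_len=max_seq_len)NEWLINE # -2 for [CLS] and [SEP]NEWLINE if len(tokenized['input_ids']) - 2 < len(labels):NEWLINE labels = labels[:len(tokenized['input_ids']) - 2]NEWLINE tokenized['labels'] = [no_entity_id] + labels + [no_entity_id]NEWLINE tokenized['labels'] += [no_entity_id] * (len(tokenized['input_ids']) - len(tokenized['labels']))NEWLINE return tokenizedNEWLINENEWLINENEWLINEout = align_labels({'tokens': ['EU', 'rejects', 'call'], 'labels': [3, 0, 0]}, StubTokenizer(), no_entity_id=0)NEWLINEassert len(out['labels']) == len(out['input_ids']) # both 5: [CLS] + 3 words + [SEP]NEWLINE |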
#!/usr/bin/env python2.7NEWLINENEWLINEimport argparseNEWLINEimport randomNEWLINEimport reNEWLINEimport stringNEWLINENEWLINEparser = argparse.ArgumentParser(description='Email Campaign Formatter')NEWLINEparser.add_argument('-s', action="store", dest='subject', default="CHANGE SUBJECT", help="email subject surrounded by double-quotes")NEWLINEparser.add_argument('-d', action="store", dest='domain', default='127.0.0.1', help='FQDN emails come from')NEWLINEparser.add_argument('-u', action="store", dest='username', default='jdoe', help='sender short email name')NEWLINEparser.add_argument('-f', action="store", dest='firstname', default='John', help='first name of sender')NEWLINEparser.add_argument('-l', action="store", dest='lastname', default='Doe', help='last name of sender')NEWLINEparser.add_argument('-e', action="store", dest='email', required=True, help='plain text email body file')NEWLINEparser.add_argument('-S', action="store", dest='signature', required=True, help='plain text signature file')NEWLINEparser.add_argument('-L', action="store", dest='link', default='', help='HTA link')NEWLINEparser.add_argument('-P', action="store", dest='pixel', default='', help='pixel link')NEWLINENEWLINEresults = parser.parse_args()NEWLINENEWLINE# Random MIME boundary: 28 characters drawn from digits and lowercase letters.NEWLINEboundaryID = ''.join(random.SystemRandom().choice(string.digits + string.ascii_lowercase) for _ in range(28))NEWLINENEWLINEheader = '\nMIME-Version: 1.0\n'NEWLINEheader += 'Subject: ' + results.subject + '\n'NEWLINEheader += 'From: ' + results.firstname + ' ' + results.lastname + ' <' + results.username + "@" + results.domain + '>' + '\n'NEWLINEheader += 'Content-Type: multipart/alternative; boundary=' + boundaryID + '\n'NEWLINENEWLINEaltTextContent = '\n--' + boundaryID + '\n'NEWLINEaltTextContent += 'Content-Type: text/plain; charset="UTF-8"\n\n'NEWLINENEWLINEhttpContent = '\n--' + boundaryID + '\n'NEWLINEhttpContent += 'Content-Type: text/html; charset="UTF-8"\n'NEWLINEhttpContent += 'Content-Transfer-Encoding: quoted-printable\n\n'NEWLINENEWLINEfooter = '\n\n--' + boundaryID + '--\n'NEWLINENEWLINE# Build the plain-text and HTML bodies in parallel ('=3D' is the quoted-printable escape for '=').NEWLINEbody = ''NEWLINEbodyDiv = '<div dir=3D"ltr"><div>'NEWLINEwith open(results.email) as f:NEWLINE for line in f:NEWLINE body += lineNEWLINE bodyDiv += '<div>' + "<br>".join(line.split("\n")) + '</div>'NEWLINENEWLINEif results.link != '':NEWLINE body += results.link + '\n'NEWLINE bodyDiv += '<a href=3D"' + results.link + '">' + results.link + "</a>"NEWLINENEWLINEwith open(results.signature) as f:NEWLINE for line in f:NEWLINE body += lineNEWLINE bodyDiv += '<div>' + "<br>".join(line.split("\n")) + '</div>'NEWLINENEWLINEif results.pixel != '':NEWLINE bodyDiv += '<div><img src="' + results.pixel + '"></div>'NEWLINENEWLINEbodyDiv += "</div></div>"NEWLINE# Soft-wrap the HTML part at 68 characters with quoted-printable '=' line breaks.NEWLINEbodyHTTP = "=\n".join(re.findall("(?s).{,68}", bodyDiv))[:-1]NEWLINEemail = header + altTextContent + body + httpContent + bodyHTTP + footerNEWLINENEWLINEprint(email)NEWLINE
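NEWLINE# For comparison, a sketch of the same multipart/alternative structure built withNEWLINE# the standard library's email package, which generates the boundary and transferNEWLINE# encoding automatically. bodyDiv above contains manual quoted-printable '=3D'NEWLINE# escapes, so they are undone before handing the HTML to MIMEText. The message isNEWLINE# built but intentionally not printed.NEWLINEfrom email.mime.multipart import MIMEMultipartNEWLINEfrom email.mime.text import MIMETextNEWLINENEWLINEdemo = MIMEMultipart('alternative')NEWLINEdemo['Subject'] = results.subjectNEWLINEdemo['From'] = results.firstname + ' ' + results.lastname + ' <' + results.username + '@' + results.domain + '>'NEWLINEdemo.attach(MIMEText(body, 'plain', 'utf-8'))NEWLINEdemo.attach(MIMEText(bodyDiv.replace('=3D', '='), 'html', 'utf-8'))NEWLINE# print(demo.as_string())  # uncomment to emit the stdlib-generated versionNEWLINE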