types.rs | /// A temperature measurement.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Temperature(i32);
/// A humidity measurement.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Humidity(i32);
/// A combined temperature / humidity measurement.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Measurement {
/// The measured temperature.
pub temperature: Temperature,
/// The measured humidity.
pub humidity: Humidity,
}
/// A combined raw temperature / humidity measurement.
///
/// The raw values are of type u16. They require a conversion formula for
/// conversion to a temperature / humidity value (see datasheet).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct RawMeasurement {
/// The measured temperature (raw value).
pub temperature: u16,
/// The measured humidity (raw value).
pub humidity: u16,
}
impl From<RawMeasurement> for Measurement {
fn from(other: RawMeasurement) -> Self {
Self {
temperature: Temperature::from_raw(other.temperature),
humidity: Humidity::from_raw(other.humidity),
}
}
}
impl Temperature {
/// Create a new `Temperature` from a raw measurement result.
pub fn from_raw(raw: u16) -> Self {
Self(convert_temperature(raw))
}
/// Return temperature in milli-degrees celsius.
pub fn as_millidegrees_celsius(&self) -> i32 {
self.0
}
/// Return temperature in degrees celsius.
pub fn as_degrees_celsius(&self) -> f32 {
self.0 as f32 / 1000.0
}
}
impl Humidity {
/// Create a new `Humidity` from a raw measurement result.
pub fn from_raw(raw: u16) -> Self {
Self(convert_humidity(raw))
}
/// Return relative humidity in 1/1000 %RH.
pub fn as_millipercent(&self) -> i32 {
self.0
}
/// Return relative humidity in %RH.
pub fn as_percent(&self) -> f32 {
self.0 as f32 / 1000.0
}
}
/// Convert raw temperature measurement to milli-degrees celsius.
///
/// Formula (datasheet 5.11): -45 + 175 * (val / 2^16),
/// optimized for fixed point math.
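///
/// In milli-degrees: -45000 + 175000 * val / 2^16. Since
/// 175000 / 2^16 = 21875 / 2^13, this reduces to
/// ((val * 21875) >> 13) - 45000 with no loss of precision.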
#[inline]
fn convert_temperature(temp_raw: u16) -> i32 {
(((temp_raw as u32) * 21875) >> 13) as i32 - 45000
}
/// Convert raw humidity measurement to relative humidity.
///
/// Formula (datasheet 5.11): 100 * (val / 2^16),
/// optimized for fixed point math.
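///
/// In 1/1000 %RH: 100000 * val / 2^16 = (val * 12500) >> 13,
/// since 100000 / 2^16 = 12500 / 2^13.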
#[inline]
fn convert_humidity(humi_raw: u16) -> i32 {
(((humi_raw as u32) * 12500) >> 13) as i32
}
#[cfg(test)]
mod tests {
use super::*;
/// Test conversion of raw measurement results into °C.
#[test]
fn test_convert_temperature() {
let test_data = [
(0x0000, -45000),
// Datasheet section 5.11 "Conversion of Sensor Output"
(((0b0110_0100 as u16) << 8) | 0b1000_1011, 23730),
];
for td in &test_data {
assert_eq!(convert_temperature(td.0), td.1);
}
}
/// Test conversion of raw measurement results into %RH.
#[test]
fn test_convert_humidity() {
let test_data = [
(0x0000, 0),
// Datasheet section 5.11 "Conversion of Sensor Output"
(((0b1010_0001 as u16) << 8) | 0b0011_0011, 62968),
];
for td in &test_data {
assert_eq!(convert_humidity(td.0), td.1);
}
}
/// Test conversion of raw measurement results into °C and %RH.
#[test]
fn measurement_conversion() {
// Datasheet section 5.11 "Conversion of Sensor Output"
let temperature = convert_temperature(((0b0110_0100 as u16) << 8) | 0b1000_1011);
let humidity = convert_humidity(((0b1010_0001 as u16) << 8) | 0b0011_0011);
assert_eq!(temperature, 23730);
assert_eq!(humidity, 62968);
}
#[test]
fn temperature() {
let temp = Temperature(24123);
assert_eq!(temp.as_millidegrees_celsius(), 24123);
assert_eq!(temp.as_degrees_celsius(), 24.123);
}
#[test]
fn humidity() {
let humi = Humidity(65432);
assert_eq!(humi.as_millipercent(), 65432);
assert_eq!(humi.as_percent(), 65.432);
}
#[test]
fn measurement_from_into() {
// Datasheet section 5.11 "Conversion of Sensor Output"
let raw = RawMeasurement {
temperature: ((0b0110_0100 as u16) << 8) | 0b1000_1011,
humidity: ((0b1010_0001 as u16) << 8) | 0b0011_0011,
};
// std::convert::From
let measurement1 = Measurement::from(raw);
assert_eq!(measurement1.temperature.0, 23730);
assert_eq!(measurement1.humidity.0, 62968);
// std::convert::Into
let measurement2: Measurement = raw.into();
assert_eq!(measurement2.temperature.0, 23730);
assert_eq!(measurement2.humidity.0, 62968);
// std::cmp::PartialEq
assert_eq!(measurement1, measurement2);
}
}
ld-header-with-filter.ts | import {
LitElement, css, customElement, html, property
} from 'lit-element';
import '@polymer/paper-input/paper-input';
import '@polymer/paper-icon-button/paper-icon-button';
import '@polymer/paper-tooltip/paper-tooltip';
import type { Language, Resources } from '../localize';
import Localize from '../localize';
import { ironFlexLayoutAlignTheme, ironFlexLayoutTheme } from '../iron-flex-import';
@customElement('ld-header-with-filter')
export class LdHeaderWithFilter extends Localize(LitElement) {
@property({ type: String }) header = '';
@property({ type: String }) direction: '' | 'asc' | 'desc' = '';
@property({ type: Boolean }) active = false;
@property({ type: String }) filterValue: string | null = null;
@property({ type: String }) property = '';
language: Language | null = 'en';
resources: Resources | null = {
en: {
search: 'Search',
clear: 'Clear',
},
'en-en': {
search: 'Search',
clear: 'Clear',
},
'en-US': {
search: 'Search',
clear: 'Clear',
},
'en-us': {
search: 'Search',
clear: 'Clear',
},
fr: {
search: 'Rechercher',
clear: 'Effacer',
},
'fr-fr': {
search: 'Rechercher',
clear: 'Effacer',
},
};
static get styles() {
const mainStyle = css`
:host {
display: block;
}
paper-input {
min-width: var(--paper-datatable-api-min-width-input-filter, 120px);
}
paper-icon-button {
padding: 0;
width: 24px;
height: 24px;
}
.header {
margin-right: 16px;
}`;
return [mainStyle, ironFlexLayoutTheme, ironFlexLayoutAlignTheme];
}
render() {
let content = html`
<div class="layout horizontal center">
<div class="header" @tap="${this.toggleActive.bind(this)}">
${this.header}
</div>
<paper-icon-button id="searchBtn" slot="actions" icon="search" @tap="${this.toggleActive.bind(this)}"></paper-icon-button>
<paper-tooltip for="searchBtn" slot="actions">${this.localize('search')}</paper-tooltip>
</div>
`;
if (this.active) {
content = html`
<style>
paper-input {
--paper-input-container-underline-focus: {
display: block;
};
--paper-input-container-label: {
position: initial;
};
--paper-input-container: {
padding: 0;
};
--paper-input-container-input: {
font-size: 7px;
};
}
paper-icon-button {
--paper-icon-button: {
color: var(--paper-icon-button-color);
}
--paper-icon-button-hover: {
color: var(--paper-icon-button-color-hover);
}
}
</style>
<paper-input
no-label-float
.placeholder="${this.header}"
.value="${this.filterValue}"
@value-changed="${this.valueChanged.bind(this)}">
<paper-icon-button
id="clearBtn"
icon="clear"
slot="suffix"
@tap="${this.toggleActive.bind(this)}"></paper-icon-button>
<paper-tooltip
for="clearBtn"
slot="suffix">
${this.localize('clear')}
</paper-tooltip>
</paper-input>`;
}
return content;
}
async toggleActive() {
this.active = !this.active;
this.dispatchEvent(new CustomEvent('active-changed', { detail: { value: this.active } }));
if (!this.active && this.filterValue) {
this.filterValue = null;
this.dispatchFilterEvent();
} else {
await this.updateComplete;
if (this.shadowRoot) {
const paperInput = this.shadowRoot.querySelector('paper-input');
if (paperInput) {
paperInput.setAttribute('tabindex', '1');
paperInput.focus();
}
}
}
}
directionChanged({ detail }: CustomEvent<{value: 'asc' | 'desc' | ''}>) {
if (this.direction !== detail.value) {
this.direction = detail.value;
this.dispatchEvent(new CustomEvent('direction-changed', { detail: { value: this.direction } }));
}
}
valueChanged({ detail }: CustomEvent<{value: string}>) {
if (this.filterValue !== detail.value) {
this.filterValue = detail.value;
this.dispatchFilterEvent();
}
}
dispatchFilterEvent() {
this.dispatchEvent(new CustomEvent('filter-value-changed', { detail: { value: this.filterValue, property: this.property } }));
}
}
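// Usage sketch (hypothetical host page; tag and event names as defined above):
//
//   <ld-header-with-filter header="Name" property="name"></ld-header-with-filter>
//
//   document.querySelector('ld-header-with-filter')
//     ?.addEventListener('filter-value-changed', (e) => {
//       const { value, property } = (e as CustomEvent).detail;
//       console.log(`filter ${property} = ${value}`);
//     });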
777_all_in_one_v1.py | #=========================================================
# Developer: Vajira Thambawita
# Reference: https://github.com/meetshah1995/pytorch-semseg
#=========================================================
import argparse
from datetime import datetime
import os
import copy
from tqdm import tqdm
import matplotlib.pyplot as plt
import numpy as np
#Pytorch
import torch
import torch.optim as optim
from torch.optim import lr_scheduler
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import models, transforms,datasets, utils
from torchvision.utils import save_image
from torch.utils.tensorboard import SummaryWriter
from torch.autograd import Variable
from torchsummary import summary
import segmentation_models_pytorch as smp
from data.dataset import Dataset
from data.prepare_data import prepare_data, prepare_test_data
#from data import PolypsDatasetWithGridEncoding
#from data import PolypsDatasetWithGridEncoding_TestData
import pyra_pytorch as pyra
from utils import dice_coeff, iou_pytorch, visualize
#======================================
# Get and set all input parameters
#======================================
parser = argparse.ArgumentParser()
# Hardware
#parser.add_argument("--device", default="gpu", help="Device to run the code")
parser.add_argument("--device_id", type=int, default=0, help="")
# Optional parameters to identify the experiments
parser.add_argument("--exp_name", type=str, help="A name to identify the experiment", required=True)
#parser.add_argument("--py_file",default=os.path.abspath(__file__)) # store current python file
# Directory and file handling
parser.add_argument("--train_CSVs",
nargs="+",
default=None,
help="CSV file list with image and mask paths")
parser.add_argument("--val_CSVs",
nargs="+",
default=None,
help="CSV file list with image and mask paths")
parser.add_argument("--test_CSVs",
nargs="+",
default=None,
help="CSV file list with image and mask paths")
parser.add_argument("--out_dir",
default="/work/vajira/DATA/sinGAN_polyps/sinGAN_exp_out/checkpoints",
help="Main output dierectory")
parser.add_argument("--tensorboard_dir",
default="/work/vajira/DATA/sinGAN_polyps/sinGAN_exp_out/tensorboard",
help="Folder to save output of tensorboard")
parser.add_argument("--test_out_dir",
default= "/work/vajira/DATA/sinGAN_polyps/sinGAN_exp_out/test_samples",
help="Output folder for testing data"
)
parser.add_argument("--best_checkpoint_name", type=str, default="best_checkpoint.pth", help="A name to save bet checkpoint")
parser.add_argument("--img_size", type=int, default=128, help="Image height and width to resize")
# Action handling
parser.add_argument("--num_epochs", type=int, default=1, help="Numbe of epochs to train")
parser.add_argument("--start_epoch", type=int, default=0, help="start epoch of training")
parser.add_argument("--num_test_samples", type=int, default=5, help="Number of samples to test.")
# smp parameters
parser.add_argument("--model", help="The model to perform segmentation", required=True)
parser.add_argument("--encoder", type=str, default='se_resnext50_32x4d', help="smp encoders")
parser.add_argument("--encoder_weights", type=str, default='imagenet', help="encoder weights")
parser.add_argument("--classes", default=[0,255], help="classes per pixel")
parser.add_argument("--activation", type=str, default='softmax2d', help="last activation layers activation")
#PYRA
parser.add_argument("--pyra", type=bool, default=False, help="To enable PYRA grid encoding.")
parser.add_argument("--grid_sizes_train", type=list, default=[256], help="Grid sizes to use in training")
parser.add_argument("--grid_sizes_val", type=list, default=[256], help="Grid sizes to use in training")
parser.add_argument("--grid_sizes_test", type=list, default=[256], help="Grid sizes to use in testing")
parser.add_argument("--in_channels", type=int, default=3, help="Number of input channgels")
# Parameters
parser.add_argument("--bs", type=int, default=8, help="Mini batch size")
parser.add_argument("--val_bs", type=int, default=1, help="Batch size")
parser.add_argument("--lr", type=float, default=0.0001, help="Learning rate for training")
parser.add_argument("--lr_change_point", type=int, default=50, help="After this point LR will be changed.")
parser.add_argument("--num_workers", type=int, default=12, help="Number of workers in dataloader")
parser.add_argument("--weight_decay", type=float, default=1e-5, help="weight decay of the optimizer")
parser.add_argument("--lr_sch_factor", type=float, default=0.1, help="Factor to reduce lr in the scheduler")
parser.add_argument("--lr_sch_patience", type=int, default=25, help="Num of epochs to be patience for updating lr")
parser.add_argument("--num_samples", type=int, default=5, help="Number of samples to print from validation set")
parser.add_argument("action", type=str, help="Select an action to run", choices=["train", "retrain", "test", "check", "check_val"])
parser.add_argument("--checkpoint_interval", type=int, default=25, help="Interval to save checkpoint models")
#parser.add_argument("--fold", type=str, default="fold_1", help="Select the validation fold", choices=["fold_1", "fold_2", "fold_3"])
#parser.add_argument("--num_test", default= 200, type=int, help="Number of samples to test set from 1k dataset")
#parser.add_argument("--model_path", default="", help="Model path to load weights")
#parser.add_argument("--num_of_samples", default=30, type=int, help="Number of samples to validate (Montecalo sampling)")
parser.add_argument("--record_name", type=str, default="VAL", help="Some name to identify records in tensorboard output")
opt = parser.parse_args()
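# Example invocation (hypothetical paths; "action" is the positional argument,
# --exp_name and --model are required):
#   python 777_all_in_one_v1.py train --exp_name demo --model Unet \
#       --train_CSVs train.csv --val_CSVs val.csv --test_CSVs test.csv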
#==========================================
# Device handling
#==========================================
torch.cuda.set_device(opt.device_id)
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
opt.device = DEVICE
#===========================================
# Folder handling
#===========================================
#make output folder if not exist
os.makedirs(opt.out_dir, exist_ok=True)
# make subfolder in the output folder
#py_file_name = opt.py_file.split("/")[-1] # Get python file name (source code name)
CHECKPOINT_DIR = os.path.join(opt.out_dir, opt.exp_name + "/checkpoints")
os.makedirs(CHECKPOINT_DIR, exist_ok=True)
# make tensorboard subdirectory for the experiment
tensorboard_exp_dir = os.path.join(opt.tensorboard_dir, opt.exp_name)
os.makedirs( tensorboard_exp_dir, exist_ok=True)
#==========================================
# Tensorboard
#==========================================
# Initialize summary writer
writer = SummaryWriter(tensorboard_exp_dir)
#==========================================
# Prepare Data
#==========================================
#================================================
# Train the model
#================================================
def train_model(train_loader, valid_loader, model, loss, metrics, optimizer, opt):
# create epoch runners
# it is a simple loop of iterating over dataloader`s samples
train_epoch = smp.utils.train.TrainEpoch(
model,
loss=loss,
metrics=metrics,
optimizer=optimizer,
device=DEVICE,
verbose=True,
)
valid_epoch = smp.utils.train.ValidEpoch(
model,
loss=loss,
metrics=metrics,
device=DEVICE,
verbose=True,
)
max_score = 0
best_chk_path = os.path.join(CHECKPOINT_DIR, opt.best_checkpoint_name)
for i in range(opt.start_epoch + 1, opt.start_epoch + opt.num_epochs +1 ):
print('\nEpoch: {}'.format(i))
train_logs = train_epoch.run(train_loader)
valid_logs = valid_epoch.run(valid_loader)
# do something (save model, change lr, etc.)
if max_score < valid_logs['iou_score']:
max_score = valid_logs['iou_score']
torch.save({"model":model, "epoch": i}, best_chk_path)
print('Best Model saved!')
print("Testing....")
do_test(opt)
print("Tested")
if i == opt.lr_change_point:
optimizer.param_groups[0]['lr'] = 1e-5
print('Decrease decoder learning rate to 1e-5!')
# writing to logs to tensorboard
for key, value in train_logs.items():
writer.add_scalar(f"Train/{key}", value, i)
for key, value in valid_logs.items():
writer.add_scalar(f"Valid/{key}", value, i)
# update here
#==============================================
# Heatmap generator from tensor
#==============================================
def generate_heatmapts(img_tensor):
print(img_tensor.shape)
fig_list = []
for n in range(img_tensor.shape[0]):
img = img_tensor[n]
img = img.squeeze(dim=0)
img_np = img.detach().cpu().numpy()
#img_np = np.transforms(img_np, (1,2,0))
plt.imshow(img_np, cmap="hot")
fig = plt.gcf()
fig_list.append(fig)
# plt.clf()
plt.close()
return fig_list
#===============================================
# Prepare models
#===============================================
def prepare_model(opt):
# model = UNet(n_channels=4, n_classes=1) # 4 = 3 channels + 1 grid encode
# create segmentation model with pretrained encoder
model = getattr(smp, opt.model)(
encoder_name=opt.encoder,
in_channels=opt.in_channels,
encoder_weights=opt.encoder_weights,
classes=len(opt.classes),
activation=opt.activation,
)
return model
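# Note: getattr resolves the architecture by name, so e.g. "--model Unet"
# instantiates smp.Unet(encoder_name=..., classes=2, ...) with the options above.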
#====================================
# Run training process
#====================================
def run_train(opt):
model = prepare_model(opt)
preprocessing_fn = smp.encoders.get_preprocessing_fn(opt.encoder, opt.encoder_weights)
train_loader, val_loader = prepare_data(opt, preprocessing_fn=None)
loss = smp.utils.losses.DiceLoss(ignore_channels=[0])
metrics = [
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=[0]),
]
optimizer = torch.optim.Adam([
dict(params=model.parameters(), lr=opt.lr),
])
train_model(train_loader, val_loader, model, loss, metrics, optimizer, opt)
#====================================
# Re-train process
#====================================
def run_retrain(opt):
checkpoint_dict = torch.load(os.path.join(CHECKPOINT_DIR, opt.best_checkpoint_name))
opt.start_epoch = checkpoint_dict["epoch"]
model = checkpoint_dict["model"]
print("Model epoch:", checkpoint_dict["epoch"])
print("Model retrain started from epoch:", opt.start_epoch)
preprocessing_fn = smp.encoders.get_preprocessing_fn(opt.encoder, opt.encoder_weights)
train_loader, val_loader = prepare_data(opt, preprocessing_fn)
loss = smp.utils.losses.DiceLoss()
metrics = [
smp.utils.metrics.IoU(threshold=0.5),
]
optimizer = torch.optim.Adam([
dict(params=model.parameters(), lr=opt.lr),
])
train_model(train_loader, val_loader, model, loss, metrics, optimizer, opt)
#=====================================
# Check model
#====================================
def check_model_graph():
raise NotImplementedError
#===================================
# Inference from pre-trained models
#===================================
def do_test(opt):
checkpoint_dict = torch.load(os.path.join(CHECKPOINT_DIR, opt.best_checkpoint_name))
test_epoch = checkpoint_dict["epoch"]
best_model = checkpoint_dict["model"]
print("Model best epoch:", test_epoch)
preprocessing_fn = smp.encoders.get_preprocessing_fn(opt.encoder, opt.encoder_weights)
test_dataset = prepare_test_data(opt, preprocessing_fn=None)
test_dataset_vis = prepare_test_data(opt, preprocessing_fn=None)
for i in range(opt.num_test_samples):
image, mask = test_dataset[i]
image_vis, _ = test_dataset_vis[i]
#print(image)
mask_tensor = torch.from_numpy(mask).to(opt.device).unsqueeze(0)
image_tensor = torch.from_numpy(image).to(opt.device).unsqueeze(0)
pr_mask = best_model.predict(image_tensor)
pr_mask = pr_mask.squeeze().cpu().numpy().round()
fig = visualize(
input_image_new=np.transpose(image_vis, (1,2,0)).astype(int),
GT_mask_0=mask[0, :,:],
Pred_mask_0 = pr_mask[0,:,:],
GT_mask_1= mask[1,:,:],
Pred_mask_1 = pr_mask[1, :,:]
)
fig.savefig(f"./test_202_{i}.png")
writer.add_figure(f"Test_sample/sample-{i}", fig, global_step=test_epoch)
def check_test_score(opt):
checkpoint_dict = torch.load(os.path.join(CHECKPOINT_DIR, opt.best_checkpoint_name))
test_best_epoch = checkpoint_dict["epoch"]
best_model = checkpoint_dict["model"]
print("Model best epoch:", test_best_epoch)
preprocessing_fn = smp.encoders.get_preprocessing_fn(opt.encoder, opt.encoder_weights)
test_dataset = prepare_test_data(opt, preprocessing_fn=None)
test_dataloader = DataLoader(test_dataset, num_workers=48)
loss = smp.utils.losses.DiceLoss()
# Testing with two class layers
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=None),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-test-score", str(logs), global_step=test_best_epoch)
# Testing with only class layer 1 (polyps)
loss = smp.utils.losses.DiceLoss(ignore_channels=[0])
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=[0]),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-test-score-ignore-channel-0", str(logs), global_step=test_best_epoch)
# Testing with only class layer 0 (BG)
loss = smp.utils.losses.DiceLoss(ignore_channels=[1])
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=[1]),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-test-score-ignore-channel-1", str(logs), global_step=test_best_epoch)
def check_val_full_score(opt):
# changing test data files into val data
#opt.test_CSVs = opt.val_CSVs
#opt.record_name = "VAL"
checkpoint_dict = torch.load(os.path.join(CHECKPOINT_DIR, opt.best_checkpoint_name))
test_best_epoch = checkpoint_dict["epoch"]
best_model = checkpoint_dict["model"]
print("Model best epoch:", test_best_epoch)
preprocessing_fn = smp.encoders.get_preprocessing_fn(opt.encoder, opt.encoder_weights)
test_dataset = prepare_test_data(opt, preprocessing_fn=None)
test_dataloader = DataLoader(test_dataset, num_workers=12)
loss = smp.utils.losses.DiceLoss()
# Testing with two class layers
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=None),
smp.utils.metrics.Fscore(threshold=0.5, ignore_channels=None),
smp.utils.metrics.Accuracy(threshold=0.5, ignore_channels=None),
smp.utils.metrics.Recall(threshold=0.5, ignore_channels=None),
smp.utils.metrics.Precision(threshold=0.5, ignore_channels=None),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-scores-->{opt.record_name}", str(logs), global_step=test_best_epoch)
# Testing with only class layer 1 (polyps)
loss = smp.utils.losses.DiceLoss(ignore_channels=[0])
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=[0]),
smp.utils.metrics.Fscore(threshold=0.5, ignore_channels=[0]),
smp.utils.metrics.Accuracy(threshold=0.5, ignore_channels=[0]),
smp.utils.metrics.Recall(threshold=0.5, ignore_channels=[0]),
smp.utils.metrics.Precision(threshold=0.5, ignore_channels=[0]),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-val-scores-ignore-channel-0-->{opt.record_name}", str(logs), global_step=test_best_epoch)
# Testing with only class layer 0 (BG)
loss = smp.utils.losses.DiceLoss(ignore_channels=[1])
metrics = [
#smp.utils.metrics.IoU(threshold=0.5),
smp.utils.metrics.IoU(threshold=0.5, ignore_channels=[1]),
smp.utils.metrics.Fscore(threshold=0.5, ignore_channels=[1]),
smp.utils.metrics.Accuracy(threshold=0.5, ignore_channels=[1]),
smp.utils.metrics.Recall(threshold=0.5, ignore_channels=[1]),
smp.utils.metrics.Precision(threshold=0.5, ignore_channels=[1]),
]
test_epoch = smp.utils.train.ValidEpoch(
model=best_model,
loss=loss,
metrics=metrics,
device=DEVICE,
)
logs = test_epoch.run(test_dataloader)
print("logs=", str(logs))
writer.add_text(f"{opt.exp_name}-val-scores-ignore-channel-1-->{opt.record_name}", str(logs), global_step=test_best_epoch)
if __name__ == "__main__":
#data_loaders = prepare_data()
print(vars(opt))
print("Test OK")
# Train or retrain or inference
if opt.action == "train":
print("Training process is strted..!")
run_train(opt)
pass
elif opt.action == "retrain":
print("Retrainning process is strted..!")
run_retrain(opt)
pass
elif opt.action == "test":
print("Inference process is strted..!")
do_test(opt)
print("Done")
elif opt.action == "check":
check_test_score(opt)
print("Check pass")
elif opt.action == "check_val":
check_val_full_score(opt)
# Finish tensorboard writer
writer.close()
command_runner.go | package powercycle
import (
"bytes"
"context"
"io"
"strings"
"time"
"go.skia.org/infra/go/executil" | "go.skia.org/infra/go/skerr"
)
// execTimeout is the timeout when we exec a command over SSH.
const execTimeout = 10 * time.Second
// The CommandRunner interface adds a layer of abstraction around sending commands to powercycle
// Controllers. It is not meant to be a general purpose interface or a robust implementation beyond
// exactly that use.
type CommandRunner interface {
// ExecCmds executes a series of commands and returns the accumulated output of all commands.
// If one command fails, an error is returned, but no other guarantees are made.
ExecCmds(ctx context.Context, cmds ...string) (string, error)
}
// stdinRunner implements the CommandRunner interface by sending commands through standard input
// to the given executable running with the given args.
type stdinRunner struct {
executable string
args []string
}
// PublicKeySSHCommandRunner returns a CommandRunner that will operate over a native ssh binary
// with the following arguments. One of the provided arguments should be the user/ip address.
// It presumes that the target is configured to authenticate via a shared public key (e.g. in
// .ssh/authorized_keys), as it does not expect or support ssh prompting for a password.
func PublicKeySSHCommandRunner(sshArgs ...string) *stdinRunner {
return &stdinRunner{
executable: "ssh",
args: sshArgs,
}
}
// PasswordSSHCommandRunner returns a CommandRunner that will operate over a native ssh binary
// with the following arguments. One of the provided arguments should be the user/ip address.
// It passes the password into ssh via sshpass. See
// http://manpages.ubuntu.com/manpages/trusty/man1/sshpass.1.html for more details on why sshpass
// is needed to give the password to ssh.
// Note: ssh is known to return errors even when the command executed normally. To work around
// this, ignore the error returned by ExecCmds and look at the standard out.
func PasswordSSHCommandRunner(password string, sshArgs ...string) *stdinRunner {
args := append([]string{"-p", password, "ssh"}, sshArgs...)
return &stdinRunner{
executable: "sshpass",
args: args,
}
}
// ExecCmds implements the CommandRunner interface. It makes a connection to the
// target and then feeds the commands into standard in joined by newlines. It
// returns any output it receives and any errors.
func (s *stdinRunner) ExecCmds(ctx context.Context, cmds ...string) (string, error) {
ctx, cancel := context.WithTimeout(ctx, execTimeout)
defer cancel()
cmd := executil.CommandContext(ctx, s.executable, s.args...)
stdin, err := cmd.StdinPipe()
if err != nil {
return "", skerr.Wrapf(err, "getting stdin pipe")
}
var combined bytes.Buffer
cmd.Stdout = &combined
cmd.Stderr = &combined
// Start the command before sending to stdin just in case we try to send
// more data to standard input than it can take (~4k).
if err := cmd.Start(); err != nil {
return "", skerr.Wrapf(err, "starting executable %s %s", s.executable, s.args)
}
// Commands sent via standard in are executed after a newline is seen.
cmdStr := strings.Join(cmds, "\n") + "\n"
if _, err := io.WriteString(stdin, cmdStr); err != nil {
return "", skerr.Wrapf(err, "sending command %q to stdin", cmdStr)
}
// SSH will keep running until stdin is closed, so we need to close it before we Wait, otherwise
// Wait will block forever.
if err := stdin.Close(); err != nil {
return "", skerr.Wrapf(err, "closing stdin pipe")
}
if err := cmd.Wait(); err != nil {
// combined could have valid input if err is non-nil, e.g. why it crashed.
return combined.String(), skerr.Wrapf(err, "running %q", cmds)
}
return combined.String(), nil
}
var _ CommandRunner = (*stdinRunner)(nil)
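// Usage sketch (hypothetical credentials and host):
//
//	runner := PasswordSSHCommandRunner("secret", "root@192.168.1.10")
//	out, err := runner.ExecCmds(context.Background(), "uptime", "exit")
//	// Per the note on PasswordSSHCommandRunner above, ssh may report an
//	// error even when the commands ran; check out before trusting err.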
a_star_03.py | maze = [[0,0,0,0,0,0,0,0,0,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,1,1,1,1,1,1,1,1,0],
[0,0,0,0,0,0,0,0,0,0]]
# we start at (1,1)
a = 1
b = 1
h = k = 4
def cost(p,q):
t = 1+abs(p-h)+abs(q-k)
return t
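# cost() returns 1 (a unit step cost) plus the Manhattan distance from
# (p,q) to the goal (h,k), i.e. an A*-style g + h estimate with g = 1.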
while(a!=4 or b!=4):
# checking the possibility of motion
maze[a][b] = '*'
if(maze[a+1][b]==1): #checking for right
t_r=cost(a+1,b)
else:
t_r = cost(a,b)
if(maze[a][b+1]==1):
t_d = cost(a,b+1)
else:
t_d = cost(a,b)
if(maze[a-1][b]==1):
t_l = cost(a-1,b)
else:
t_l = cost(a,b)
if(maze[a][b-1]==1):
t_u = cost(a,b-1)
else:
t_u = cost(a,b)
# comparison between costs
if (t_d > t_l or t_d > t_r or t_d > t_u):
a = a
b = b
# then t_d is not the least value of cost
else:
# then t_d is the least value
b = b + 1
if (t_r > t_l or t_r > t_u or t_r > t_d):
a = a
b = b
else:
a += 1
if (t_l > t_u or t_l > t_r or t_l > t_d):
a = a
b = b
else:
a = a - 1
if (t_u > t_l or t_u > t_r or t_u > t_d):
a = a
b = b
else:
b = b - 1
if (a == 4 and b == 4):
maze[a][b] = "*"
print(maze)
stats.py | """Miscellaneous statistical functions."""
import numpy as np
import scipy.stats as ss
from scipy.optimize import Bounds, minimize
def weighted_least_squares(y, v, X, tau2=0.0, return_cov=False):
"""2-D weighted least squares.
Args:
y (NDArray): 2-d array of estimates (studies x parallel datasets)
v (NDArray): 2-d array of sampling variances
X (NDArray): Fixed effect design matrix
tau2 (float): tau^2 estimate to use for weights
return_cov (bool): Whether or not to return the inverse cov matrix
Returns:
If return_cov is True, returns both fixed parameter estimates and the
inverse covariance matrix; if False, only the parameter estimates.
"""
w = 1.0 / (v + tau2)
# Einsum indices: k = studies, p = predictors, i = parallel iterates
wX = np.einsum("kp,ki->ipk", X, w)
cov = wX.dot(X)
# numpy >= 1.8 inverts stacked matrices along the first N - 2 dims, so we
# can vectorize computation along the second dimension (parallel datasets)
precision = np.linalg.pinv(cov).T
pwX = np.einsum("ipk,qpi->iqk", wX, precision)
beta = np.einsum("ipk,ik->ip", pwX, y.T).T
return (beta, precision) if return_cov else beta
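# Shape sketch (hypothetical sizes): with K=10 studies, P=2 predictors and
# I=3 parallel datasets, y and v are (10, 3) and X is (10, 2); then wX is
# (3, 2, 10), cov is (3, 2, 2), and the returned beta is (2, 3), one column
# of fixed-effect estimates per dataset.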
def ensure_2d(arr):
"""Ensure the passed array has 2 dimensions."""
if arr is None:
return arr
try:
arr = np.array(arr)
except Exception:
return arr
if arr.ndim == 1:
arr = arr[:, None]
return arr
def q_profile(y, v, X, alpha=0.05):
"""Get the CI for tau^2 via the Q-Profile method (Viechtbauer, 2007).
Args:
y (ndarray): 1d array of study-level estimates
v (ndarray): 1d array of study-level variances
X (ndarray): 1d or 2d array containing study-level predictors
(including intercept); has dimensions K x P, where K is the number
of studies and P is the number of predictor variables.
alpha (float, optional): alpha value defining the coverage of the CIs,
where width(CI) = 1 - alpha. Defaults to 0.05.
Returns:
A dictionary with keys 'ci_l' and 'ci_u', corresponding to the lower
and upper bounds of the tau^2 confidence interval, respectively.
Notes:
Following the Viechtbauer implementation, this method returns the
interval that gives an equal probability mass at both tails (i.e.,
P(tau^2 <= lower_bound) == P(tau^2 >= upper_bound) == alpha/2), and
*not* the smallest possible range of tau^2 values that provides the
desired coverage.
References:
Viechtbauer, W. (2007). Confidence intervals for the amount of
heterogeneity in meta-analysis. Statistics in Medicine, 26(1), 37-52.
"""
k, p = X.shape
df = k - p
l_crit = ss.chi2.ppf(1 - alpha / 2, df)
u_crit = ss.chi2.ppf(alpha / 2, df)
args = (ensure_2d(y), ensure_2d(v), X)
bds = Bounds([0], [np.inf], keep_feasible=True)
# Use the D-L estimate of tau^2 as a starting point; when using a fixed
# value, minimize() sometimes fails to stay in bounds.
from .estimators import DerSimonianLaird
ub_start = 2 * DerSimonianLaird().fit(y, v, X).params_["tau2"]
lb = minimize(lambda x: (q_gen(*args, x) - l_crit) ** 2, [0], bounds=bds).x[0]
ub = minimize(lambda x: (q_gen(*args, x) - u_crit) ** 2, [ub_start], bounds=bds).x[0]
return {"ci_l": lb, "ci_u": ub}
def q_gen(y, v, X, tau2):
"""Generalized form of Cochran's Q-statistic.
Args:
y (ndarray): 1d array of study-level estimates
v (ndarray): 1d array of study-level variances
X (ndarray): 1d or 2d array containing study-level predictors
(including intercept); has dimensions K x P, where K is the number
of studies and P is the number of predictor variables.
tau2 (float): Between-study variance. Must be >= 0.
Returns:
A float giving the value of Cochran's Q-statistic.
References:
Veroniki, A. A., Jackson, D., Viechtbauer, W., Bender, R., Bowden, J.,
Knapp, G., Kuss, O., Higgins, J. P., Langan, D., & Salanti, G. (2016).
Methods to estimate the between-study variance and its uncertainty in
meta-analysis. Research synthesis methods, 7(1), 55–79.
https://doi.org/10.1002/jrsm.1164
"""
if np.any(tau2 < 0):
raise ValueError("Value of tau^2 must be >= 0.")
beta = weighted_least_squares(y, v, X, tau2)
w = 1.0 / (v + tau2)
return (w * (y - X.dot(beta)) ** 2).sum(0)
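# Usage sketch for q_gen (synthetic numbers, intercept-only design):
#   y = np.array([[0.5], [0.3], [0.4]])     # 3 studies x 1 dataset
#   v = np.array([[0.04], [0.05], [0.06]])  # sampling variances
#   X = np.ones((3, 1))                     # intercept-only design matrix
#   q_gen(y, v, X, tau2=0.0)                # -> array with a single Q value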
releases.rs | use std::env;
use std::ffi::OsStr;
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use failure::{bail, Error};
use if_chain::if_chain;
use lazy_static::lazy_static;
use regex::Regex;
use crate::utils::cordova::CordovaConfig;
use crate::utils::vcs;
use crate::utils::xcode::InfoPlist;
pub fn get_cordova_release_name(path: Option<PathBuf>) -> Result<Option<String>, Error> {
let here = path.unwrap_or(env::current_dir()?);
let platform = match here.file_name().and_then(OsStr::to_str) {
Some("android") => "android",
Some("ios") => "ios",
_ => return Ok(None),
};
let base = match here.parent().and_then(Path::parent) {
Some(path) => path,
None => return Ok(None),
};
let path = base.join("config.xml");
if_chain! {
if let Ok(md) = path.metadata();
if md.is_file();
if let Ok(Some(config)) = CordovaConfig::load(path);
then {
match platform {
"android" => Ok(Some(config.android_release_name())),
"ios" => Ok(Some(config.ios_release_name())),
_ => unreachable!(),
}
} else {
Ok(None)
}
}
}
pub fn get_xcode_release_name(plist: Option<InfoPlist>) -> Result<Option<String>, Error> {
// if we are executed from within xcode, then we can use the environment
// based discovery to get a release name without any interpolation.
if let Some(plist) = plist.or(InfoPlist::discover_from_env()?) {
return Ok(Some(plist.get_release_name()));
}
Ok(None)
}
pub fn infer_gradle_release_name(path: Option<PathBuf>) -> Result<Option<String>, Error> {
lazy_static! {
static ref APP_ID_RE: Regex = Regex::new(r#"applicationId\s+["']([^"']*)["']"#).unwrap();
static ref VERSION_NAME_RE: Regex =
Regex::new(r#"versionName\s+["']([^"']*)["']"#).unwrap();
}
let mut contents = String::new();
let mut here = path.unwrap_or(env::current_dir()?);
loop {
if_chain! {
if let Ok(build_md) = here.join("build.gradle").metadata();
if build_md.is_file();
if let Ok(app_md) = here.join("app/build.gradle").metadata();
if app_md.is_file();
if let Ok(mut f) = fs::File::open(here.join("app/build.gradle"));
if f.read_to_string(&mut contents).is_ok();
if let Some(app_id_caps) = APP_ID_RE.captures(&contents);
if let Some(version_caps) = VERSION_NAME_RE.captures(&contents);
then {
return Ok(Some(format!("{}-{}", &app_id_caps[1], &version_caps[1])));
}
}
if !here.pop() {
break;
}
}
Ok(None)
}
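// For example, an app/build.gradle containing `applicationId "com.example.app"`
// and `versionName "1.2.3"` (hypothetical values) would produce the release
// name "com.example.app-1.2.3".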
/// Detects the release name for the current working directory.
pub fn detect_release_name() -> Result<String, Error> {
// cordova release detection first.
if let Some(release) = get_cordova_release_name(None)? {
return Ok(release);
}
// try heroku: https://docs.sentry.io/workflow/integrations/legacy-integrations/heroku/#configure-releases
if let Ok(release) = env::var("HEROKU_SLUG_COMMIT") {
if !release.is_empty() {
return Ok(release);
}
}
// for now only execute this on macs. The reason is that this uses
// xcodebuild which does not exist anywhere but there.
if_chain! {
if cfg!(target_os="macos");
if let Some(release) = get_xcode_release_name(None)?;
then {
return Ok(release)
}
}
// For android we badly parse gradle files. We do this because most of the
// time now people set the ids and versions in the gradle files instead of
// the xml manifests.
if let Some(release) = infer_gradle_release_name(None)? {
return Ok(release);
}
if let Ok(head) = vcs::find_head() {
Ok(head)
} else {
bail!("Could not automatically determine release name");
}
}
| {
let here = path.unwrap_or(env::current_dir()?);
let platform = match here.file_name().and_then(OsStr::to_str) {
Some("android") => "android",
Some("ios") => "ios",
_ => return Ok(None),
};
let base = match here.parent().and_then(Path::parent) {
Some(path) => path,
None => return Ok(None),
};
let path = base.join("config.xml");
if_chain! {
if let Ok(md) = path.metadata();
if md.is_file();
if let Ok(Some(config)) = CordovaConfig::load(path);
then {
match platform {
"android" => Ok(Some(config.android_release_name())),
"ios" => Ok(Some(config.ios_release_name())),
_ => unreachable!(),
}
} else {
Ok(None)
}
}
} |
_match.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Compilation of match statements
//!
//! I will endeavor to explain the code as best I can. I have only a loose
//! understanding of some parts of it.
//!
//! ## Matching
//!
//! The basic state of the code is maintained in an array `m` of `Match`
//! objects. Each `Match` describes some list of patterns, all of which must
//! match against the current list of values. If those patterns match, then
//! the arm listed in the match is the correct arm. A given arm may have
//! multiple corresponding match entries, one for each alternative that
//! remains. As we proceed these sets of matches are adjusted by the various
//! `enter_XXX()` functions, each of which adjusts the set of options given
//! some information about the value which has been matched.
//!
//! So, initially, there is one value and N matches, each of which have one
//! constituent pattern. N here is usually the number of arms but may be
//! greater, if some arms have multiple alternatives. For example, here:
//!
//! enum Foo { A, B(int), C(usize, usize) }
//! match foo {
//! A => ...,
//! B(x) => ...,
//! C(1, 2) => ...,
//! C(_) => ...
//! }
//!
//! The value would be `foo`. There would be four matches, each of which
//! contains one pattern (and, in one case, a guard). We could collect the
//! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1`
//! and `2`, and in the second case the _ pattern would be expanded into
//! `_` and `_`. The two values are of course the arguments to `C`.
//!
//! Here is a quick guide to the various functions:
//!
//! - `compile_submatch()`: The main workhorse. It takes a list of values and
//! a list of matches and finds the various possibilities that could occur.
//!
//! - `enter_XXX()`: modifies the list of matches based on some information
//! about the value that has been matched. For example,
//! `enter_rec_or_struct()` adjusts the values given that a record or struct
//! has been matched. This is an infallible pattern, so *all* of the matches
//! must be either wildcards or record/struct patterns. `enter_opt()`
//! handles the fallible cases, and it is correspondingly more complex.
//!
//! ## Bindings
//!
//! We store information about the bound variables for each arm as part of the
//! per-arm `ArmData` struct. There is a mapping from identifiers to
//! `BindingInfo` structs. These structs contain the mode/id/type of the
//! binding, but they also contain an LLVM value which points at an alloca
//! called `llmatch`. For by value bindings that are Copy, we also create
//! an extra alloca that we copy the matched value to so that any changes
//! we make to our copy are not reflected in the original and vice-versa.
//! We don't do this if it's a move, since the original value can't be used
//! anyway, which lets us cheat and skip the extra alloca.
//!
//! The `llmatch` binding always stores a pointer into the value being matched
//! which points at the data for the binding. If the value being matched has
//! type `T`, then, `llmatch` will point at an alloca of type `T*` (and hence
//! `llmatch` has type `T**`). So, if you have a pattern like:
//!
//! let a: A = ...;
//! let b: B = ...;
//! match (a, b) { (ref c, d) => { ... } }
//!
//! For `c` and `d`, we would generate allocas of type `C*` and `D*`
//! respectively. These are called the `llmatch`. As we match, when we come
//! up against an identifier, we store the current pointer into the
//! corresponding alloca.
//!
//! Once a pattern is completely matched, and assuming that there is no guard
//! pattern, we will branch to a block that leads to the body itself. For any
//! by-value bindings, this block will first load the ptr from `llmatch` (the
//! one of type `D*`) and then load a second time to get the actual value (the
//! one of type `D`). For by ref bindings, the value of the local variable is
//! simply the first alloca.
//!
//! So, for the example above, we would generate a setup kind of like this:
//!
//! +-------+
//! | Entry |
//! +-------+
//! |
//! +--------------------------------------------+
//! | llmatch_c = (addr of first half of tuple) |
//! | llmatch_d = (addr of second half of tuple) |
//! +--------------------------------------------+
//! |
//! +--------------------------------------+
//! | *llbinding_d = **llmatch_d |
//! +--------------------------------------+
//!
//! If there is a guard, the situation is slightly different, because we must
//! execute the guard code. Moreover, we need to do so once for each of the
//! alternatives that lead to the arm, because if the guard fails, they may
//! have different points from which to continue the search. Therefore, in that
//! case, we generate code that looks more like:
//!
//! +-------+
//! | Entry |
//! +-------+
//! |
//! +--------------------------------------------+
//! | llmatch_c = (addr of first half of tuple)  |
//! | llmatch_d = (addr of second half of tuple) |
//! +--------------------------------------------+
//! |
//! +-------------------------------------------------+
//! | *llbinding_d = **llmatch_d |
//! | check condition |
//! | if false { goto next case } |
//! | if true { goto body } |
//! +-------------------------------------------------+
//!
//! The handling for the cleanups is a bit... sensitive. Basically, the body
//! is the one that invokes `add_clean()` for each binding. During the guard
//! evaluation, we add temporary cleanups and revoke them after the guard is
//! evaluated (it could fail, after all). Note that guards and moves are
//! just plain incompatible.
//!
//! Some relevant helper functions that manage bindings:
//! - `create_bindings_map()`
//! - `insert_lllocals()`
//!
//!
//! ## Notes on vector pattern matching.
//!
//! Vector pattern matching is surprisingly tricky. The problem is that
//! the structure of the vector isn't fully known, and slice matches
//! can be done on subparts of it.
//!
//! The way that vector pattern matches are dealt with, then, is as
//! follows. First, we make the actual condition associated with a
//! vector pattern simply a vector length comparison. So the pattern
//! [1, .. x] gets the condition "vec len >= 1", and the pattern
//! [.. x] gets the condition "vec len >= 0". The problem here is that
//! having the condition "vec len >= 1" hold clearly does not mean that
//! only a pattern that has exactly that condition will match. This
//! means that it may well be the case that a condition holds, but none
//! of the patterns matching that condition match; to deal with this,
//! when doing vector length matches, we have match failures proceed to
//! the next condition to check.
//!
//! There are a couple more subtleties to deal with. While the "actual"
//! condition associated with vector length tests is simply a test on
//! the vector length, the actual vec_len Opt entry contains more
//! information used to restrict which matches are associated with it.
//! So that all matches in a submatch are matching against the same
//! values from inside the vector, they are split up by how many
//! elements they match at the front and at the back of the vector. In
//! order to make sure that arms are properly checked in order, even
//! with the overmatching conditions, each vec_len Opt entry is
//! associated with a range of matches.
//! Consider the following:
//!
//! match &[1, 2, 3] {
//! [1, 1, .. _] => 0,
//! [1, 2, 2, .. _] => 1,
//! [1, 2, 3, .. _] => 2,
//! [1, 2, .. _] => 3,
//! _ => 4
//! }
//! The proper arm to match is arm 2, but arms 0 and 3 both have the
//! condition "len >= 2". If arm 3 was lumped in with arm 0, then the
//! wrong branch would be taken. Instead, vec_len Opts are associated
//! with a contiguous range of matches that have the same "shape".
//! This is sort of ugly and requires a bunch of special handling of
//! vec_len options.
pub use self::BranchKind::*;
pub use self::OptResult::*;
pub use self::TransBindingMode::*;
use self::Opt::*;
use self::FailureHandler::*;
use llvm::{ValueRef, BasicBlockRef};
use rustc_const_eval::check_match::{self, Constructor, StaticInliner};
use rustc_const_eval::{compare_lit_exprs, eval_const_expr, fatal_const_eval_err};
use rustc::hir::def::{Def, DefMap};
use rustc::hir::def_id::DefId;
use middle::expr_use_visitor as euv;
use middle::lang_items::StrEqFnLangItem;
use middle::mem_categorization as mc;
use middle::mem_categorization::Categorization;
use rustc::hir::pat_util::*;
use rustc::ty::subst::Substs;
use adt;
use base::*;
use build::{AddCase, And, Br, CondBr, GEPi, InBoundsGEP, Load, PointerCast};
use build::{Not, Store, Sub, add_comment};
use build;
use callee::{Callee, ArgVals};
use cleanup::{self, CleanupMethods, DropHintMethods};
use common::*;
use consts;
use datum::*;
use debuginfo::{self, DebugLoc, ToDebugLoc};
use expr::{self, Dest};
use monomorphize;
use tvec;
use type_of;
use Disr;
use value::Value;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::traits::ProjectionMode;
use session::config::NoDebugInfo;
use util::common::indenter;
use util::nodemap::FnvHashMap;
use util::ppaux;
use std;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
use rustc::hir::{self, PatKind};
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
use syntax_pos::Span;
use rustc::hir::fold::Folder;
use syntax::ptr::P;
#[derive(Copy, Clone, Debug)]
struct ConstantExpr<'a>(&'a hir::Expr);
impl<'a> ConstantExpr<'a> {
fn eq<'b, 'tcx>(self, other: ConstantExpr<'a>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool {
match compare_lit_exprs(tcx, self.0.span, self.0, other.0) {
Ok(result) => result == Ordering::Equal,
Err(_) => bug!("compare_list_exprs: type mismatch"),
}
}
}
// An option identifying a branch (either a literal, an enum variant or a range)
#[derive(Debug)]
enum Opt<'a, 'tcx> {
ConstantValue(ConstantExpr<'a>, DebugLoc),
ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>, DebugLoc),
Variant(Disr, Rc<adt::Repr<'tcx>>, DefId, DebugLoc),
SliceLengthEqual(usize, DebugLoc),
SliceLengthGreaterOrEqual(/* prefix length */ usize,
/* suffix length */ usize,
DebugLoc),
}
impl<'a, 'b, 'tcx> Opt<'a, 'tcx> {
fn eq(&self, other: &Opt<'a, 'tcx>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool {
match (self, other) {
(&ConstantValue(a, _), &ConstantValue(b, _)) => a.eq(b, tcx),
(&ConstantRange(a1, a2, _), &ConstantRange(b1, b2, _)) => {
a1.eq(b1, tcx) && a2.eq(b2, tcx)
}
(&Variant(a_disr, ref a_repr, a_def, _),
&Variant(b_disr, ref b_repr, b_def, _)) => {
a_disr == b_disr && *a_repr == *b_repr && a_def == b_def
}
(&SliceLengthEqual(a, _), &SliceLengthEqual(b, _)) => a == b,
(&SliceLengthGreaterOrEqual(a1, a2, _),
&SliceLengthGreaterOrEqual(b1, b2, _)) => {
a1 == b1 && a2 == b2
}
_ => false
}
}
fn trans<'blk>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> {
use consts::TrueConst::Yes;
let _icx = push_ctxt("match::trans_opt");
let ccx = bcx.ccx();
match *self {
ConstantValue(ConstantExpr(lit_expr), _) => {
let lit_ty = bcx.tcx().node_id_to_type(lit_expr.id);
let expr = consts::const_expr(ccx, &lit_expr, bcx.fcx.param_substs, None, Yes);
let llval = match expr {
Ok((llval, _)) => llval,
Err(err) => {
fatal_const_eval_err(bcx.tcx(), err.as_inner(), lit_expr.span, "pattern");
}
};
let lit_datum = immediate_rvalue(llval, lit_ty);
let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
SingleResult(Result::new(bcx, lit_datum.val))
}
ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2), _) => {
let l1 = match consts::const_expr(ccx, &l1, bcx.fcx.param_substs, None, Yes) {
Ok((l1, _)) => l1,
Err(err) => fatal_const_eval_err(bcx.tcx(), err.as_inner(), l1.span, "pattern"),
};
let l2 = match consts::const_expr(ccx, &l2, bcx.fcx.param_substs, None, Yes) {
Ok((l2, _)) => l2,
Err(err) => fatal_const_eval_err(bcx.tcx(), err.as_inner(), l2.span, "pattern"),
};
RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
}
Variant(disr_val, ref repr, _, _) => {
SingleResult(Result::new(bcx, adt::trans_case(bcx, &repr, disr_val)))
}
SliceLengthEqual(length, _) => {
SingleResult(Result::new(bcx, C_uint(ccx, length)))
}
SliceLengthGreaterOrEqual(prefix, suffix, _) => {
LowerBound(Result::new(bcx, C_uint(ccx, prefix + suffix)))
}
}
}
fn debug_loc(&self) -> DebugLoc {
match *self {
ConstantValue(_,debug_loc) |
ConstantRange(_, _, debug_loc) |
Variant(_, _, _, debug_loc) |
SliceLengthEqual(_, debug_loc) |
SliceLengthGreaterOrEqual(_, _, debug_loc) => debug_loc
}
}
}
#[derive(Copy, Clone, PartialEq)]
pub enum BranchKind {
NoBranch,
Single,
Switch,
Compare,
CompareSliceLength
}
pub enum OptResult<'blk, 'tcx: 'blk> {
SingleResult(Result<'blk, 'tcx>),
RangeResult(Result<'blk, 'tcx>, Result<'blk, 'tcx>),
LowerBound(Result<'blk, 'tcx>)
}
#[derive(Clone, Copy, PartialEq)]
pub enum TransBindingMode {
/// By-value binding for a copy type: copies from matched data
/// into a fresh LLVM alloca.
TrByCopy(/* llbinding */ ValueRef),
/// By-value binding for a non-copy type where we copy into a
/// fresh LLVM alloca; this most accurately reflects the language
/// semantics (e.g. it properly handles overwrites of the matched
/// input), but potentially injects an unwanted copy.
TrByMoveIntoCopy(/* llbinding */ ValueRef),
/// Binding a non-copy type by reference under the hood; this is
/// a codegen optimization to avoid unnecessary memory traffic.
TrByMoveRef,
/// By-ref binding exposed in the original source input.
TrByRef,
}
impl TransBindingMode {
/// if binding by making a fresh copy; returns the alloca that it
/// will copy into; otherwise None.
fn alloca_if_copy(&self) -> Option<ValueRef> {
match *self {
TrByCopy(llbinding) | TrByMoveIntoCopy(llbinding) => Some(llbinding),
TrByMoveRef | TrByRef => None,
}
}
}
/// Information about a pattern binding:
/// - `llmatch` is a pointer to a stack slot. The stack slot contains a
/// pointer into the value being matched. Hence, llmatch has type `T**`
/// where `T` is the value being matched.
/// - `trmode` is the trans binding mode
/// - `id` is the node id of the binding
/// - `ty` is the Rust type of the binding
#[derive(Clone, Copy)]
pub struct BindingInfo<'tcx> {
pub llmatch: ValueRef,
pub trmode: TransBindingMode,
pub id: ast::NodeId,
pub span: Span,
pub ty: Ty<'tcx>,
}
type BindingsMap<'tcx> = FnvHashMap<ast::Name, BindingInfo<'tcx>>;
struct ArmData<'p, 'blk, 'tcx: 'blk> {
bodycx: Block<'blk, 'tcx>,
arm: &'p hir::Arm,
bindings_map: BindingsMap<'tcx>
}
/// Info about Match.
/// If all `pats` are matched then arm `data` will be executed.
/// As we proceed `bound_ptrs` are filled with pointers to values to be bound,
/// these pointers are stored in llmatch variables just before executing `data` arm.
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
pats: Vec<&'p hir::Pat>,
data: &'a ArmData<'p, 'blk, 'tcx>,
bound_ptrs: Vec<(ast::Name, ValueRef)>,
// Thread along renamings done by the check_match::StaticInliner, so we can
// map back to original NodeIds
pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
}
impl<'a, 'p, 'blk, 'tcx> fmt::Debug for Match<'a, 'p, 'blk, 'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if ppaux::verbose() {
// for many programs, this just takes too long to serialize
write!(f, "{:?}", self.pats)
} else {
write!(f, "{} pats", self.pats.len())
}
}
}
fn has_nested_bindings(m: &[Match], col: usize) -> bool {
for br in m {
if let PatKind::Binding(_, _, Some(..)) = br.pats[col].node {
return true
}
}
false
}
// As noted in `fn match_datum`, we should eventually pass around a
// `Datum<Lvalue>` for the `val`; but until we get to that point, this
// `MatchInput` struct will serve -- it has everything `Datum<Lvalue>`
// does except for the type field.
#[derive(Copy, Clone)]
pub struct MatchInput { val: ValueRef, lval: Lvalue }
impl<'tcx> Datum<'tcx, Lvalue> {
pub fn match_input(&self) -> MatchInput {
MatchInput {
val: self.val,
lval: self.kind,
}
}
}
impl fmt::Debug for MatchInput {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&Value(self.val), f)
}
}
impl MatchInput {
fn from_val(val: ValueRef) -> MatchInput {
MatchInput {
val: val,
lval: Lvalue::new("MatchInput::from_val"),
}
}
fn to_datum<'tcx>(self, ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
Datum::new(self.val, ty, self.lval)
}
}
fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
col: usize,
val: MatchInput)
-> Vec<Match<'a, 'p, 'blk, 'tcx>> {
debug!("expand_nested_bindings(bcx={}, m={:?}, col={}, val={:?})",
bcx.to_str(), m, col, val);
let _indenter = indenter();
m.iter().map(|br| {
let mut bound_ptrs = br.bound_ptrs.clone();
let mut pat = br.pats[col];
loop {
pat = match pat.node {
PatKind::Binding(_, ref path, Some(ref inner)) => {
bound_ptrs.push((path.node, val.val));
&inner
},
_ => break
}
}
let mut pats = br.pats.clone();
pats[col] = pat;
Match {
pats: pats,
data: &br.data,
bound_ptrs: bound_ptrs,
pat_renaming_map: br.pat_renaming_map,
}
}).collect()
}
fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
col: usize,
val: MatchInput,
mut e: F)
-> Vec<Match<'a, 'p, 'blk, 'tcx>> where
F: FnMut(&[(&'p hir::Pat, Option<Ty<'tcx>>)])
-> Option<Vec<(&'p hir::Pat, Option<Ty<'tcx>>)>>,
{
debug!("enter_match(bcx={}, m={:?}, col={}, val={:?})",
bcx.to_str(), m, col, val);
let _indenter = indenter();
m.iter().filter_map(|br| {
let pats : Vec<_> = br.pats.iter().map(|p| (*p, None)).collect();
e(&pats).map(|pats| {
let this = br.pats[col];
let mut bound_ptrs = br.bound_ptrs.clone();
match this.node {
PatKind::Binding(_, ref path, None) => {
bound_ptrs.push((path.node, val.val));
}
PatKind::Vec(ref before, Some(ref slice), ref after) => {
if let PatKind::Binding(_, ref path, None) = slice.node {
let subslice_val = bind_subslice_pat(
bcx, this.id, val,
before.len(), after.len());
bound_ptrs.push((path.node, subslice_val));
}
}
_ => {}
}
Match {
pats: pats.into_iter().map(|p| p.0).collect(),
data: br.data,
bound_ptrs: bound_ptrs,
pat_renaming_map: br.pat_renaming_map,
}
})
}).collect()
}
fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
col: usize,
val: MatchInput)
-> Vec<Match<'a, 'p, 'blk, 'tcx>> {
debug!("enter_default(bcx={}, m={:?}, col={}, val={:?})",
bcx.to_str(), m, col, val);
let _indenter = indenter();
// Collect all of the matches that can match against anything.
enter_match(bcx, m, col, val, |pats| {
match pats[col].0.node {
PatKind::Binding(..) | PatKind::Wild => {
let mut r = pats[..col].to_vec();
r.extend_from_slice(&pats[col + 1..]);
Some(r)
}
_ => None
}
})
}
// <pcwalton> nmatsakis: what does enter_opt do?
// <pcwalton> in trans/match
// <pcwalton> trans/match.rs is like stumbling around in a dark cave
// <nmatsakis> pcwalton: the enter family of functions adjust the set of
// patterns as needed
// <nmatsakis> yeah, at some point I kind of achieved some level of
// understanding
// <nmatsakis> anyhow, they adjust the patterns given that something of that
// kind has been found
// <nmatsakis> pcwalton: ok, right, so enter_XXX() adjusts the patterns, as I
// said
// <nmatsakis> enter_match() kind of embodies the generic code
// <nmatsakis> it is provided with a function that tests each pattern to see
// if it might possibly apply and so forth
// <nmatsakis> so, if you have a pattern like {a: _, b: _, _} and one like _
// <nmatsakis> then _ would be expanded to (_, _)
// <nmatsakis> one spot for each of the sub-patterns
// <nmatsakis> enter_opt() is one of the more complex; it covers the fallible
// cases
// <nmatsakis> enter_rec_or_struct() or enter_tuple() are simpler, since they
// are infallible patterns
// <nmatsakis> so all patterns must either be records (resp. tuples) or
// wildcards
/// The above is now outdated in that enter_match() now takes a function that
/// takes the complete row of patterns rather than just the first one.
/// Also, most of the enter_() family functions have been unified with
/// the check_match specialization step.
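///
/// For example, when specializing the rows of
/// `match x { Some(a) => ..., _ => ... }` on the `Some` constructor, the
/// `Some(a)` row is replaced by its argument pattern `a`, while the wildcard
/// row is kept and expanded with a fresh wildcard for that argument.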
fn enter_opt<'a, 'p, 'blk, 'tcx>(
bcx: Block<'blk, 'tcx>,
_: ast::NodeId,
m: &[Match<'a, 'p, 'blk, 'tcx>],
opt: &Opt,
col: usize,
variant_size: usize,
val: MatchInput)
-> Vec<Match<'a, 'p, 'blk, 'tcx>> {
debug!("enter_opt(bcx={}, m={:?}, opt={:?}, col={}, val={:?})",
bcx.to_str(), m, *opt, col, val);
let _indenter = indenter();
let ctor = match opt {
&ConstantValue(ConstantExpr(expr), _) => Constructor::ConstantValue(
eval_const_expr(bcx.tcx(), &expr)
),
&ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => Constructor::ConstantRange(
eval_const_expr(bcx.tcx(), &lo),
eval_const_expr(bcx.tcx(), &hi)
),
&SliceLengthEqual(n, _) =>
Constructor::Slice(n),
&SliceLengthGreaterOrEqual(before, after, _) =>
Constructor::SliceWithSubslice(before, after),
&Variant(_, _, def_id, _) =>
Constructor::Variant(def_id)
};
let param_env = bcx.tcx().empty_parameter_environment();
let mcx = check_match::MatchCheckCtxt {
tcx: bcx.tcx(),
param_env: param_env,
};
enter_match(bcx, m, col, val, |pats|
check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
)
}
// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
col: usize)
-> Vec<Opt<'p, 'tcx>> {
let tcx = bcx.tcx();
let mut found: Vec<Opt> = vec![];
for br in m {
let cur = br.pats[col];
let debug_loc = match br.pat_renaming_map {
Some(pat_renaming_map) => {
match pat_renaming_map.get(&(cur.id, cur.span)) {
Some(&id) => DebugLoc::At(id, cur.span),
None => DebugLoc::At(cur.id, cur.span),
}
}
None => DebugLoc::None
};
let opt = match cur.node {
PatKind::Lit(ref l) => {
ConstantValue(ConstantExpr(&l), debug_loc)
}
PatKind::Path(..) | PatKind::TupleStruct(..) | PatKind::Struct(..) => {
match tcx.expect_def(cur.id) {
Def::Variant(enum_id, var_id) => {
let variant = tcx.lookup_adt_def(enum_id).variant_with_id(var_id);
Variant(Disr::from(variant.disr_val),
adt::represent_node(bcx, cur.id),
var_id,
debug_loc)
}
_ => continue
}
}
PatKind::Range(ref l1, ref l2) => {
ConstantRange(ConstantExpr(&l1), ConstantExpr(&l2), debug_loc)
}
PatKind::Vec(ref before, None, ref after) => {
SliceLengthEqual(before.len() + after.len(), debug_loc)
}
PatKind::Vec(ref before, Some(_), ref after) => {
SliceLengthGreaterOrEqual(before.len(), after.len(), debug_loc)
}
_ => continue
};
if !found.iter().any(|x| x.eq(&opt, tcx)) {
found.push(opt);
}
}
found
}
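// The field values unpacked from one constructor application, together with
// the basic block in which they were computed.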
struct ExtractedBlock<'blk, 'tcx: 'blk> {
vals: Vec<ValueRef>,
bcx: Block<'blk, 'tcx>,
}
fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
repr: &adt::Repr<'tcx>,
disr_val: Disr,
val: MatchInput)
-> ExtractedBlock<'blk, 'tcx> {
let _icx = push_ctxt("match::extract_variant_args");
// Assume enums are always sized for now.
let val = adt::MaybeSizedValue::sized(val.val);
let args = (0..adt::num_args(repr, disr_val)).map(|i| {
adt::trans_field_ptr(bcx, repr, val, disr_val, i)
}).collect();
ExtractedBlock { vals: args, bcx: bcx }
}
/// Helper for converting from the ValueRef that we pass around in the match code, which is always
/// an lvalue, into a Datum. Eventually we should just pass around a Datum and be done with it.
fn match_datum<'tcx>(val: MatchInput, left_ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
val.to_datum(left_ty)
}
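// Builds the value bound by a subslice pattern such as `[a, rest.., b]`: for
// fixed-size arrays it returns a pointer to a shorter array type, while for
// slices it materializes a fresh `&[T]` fat pointer (data pointer plus
// adjusted length) in a scratch alloca.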
fn bind_subslice_pat(bcx: Block,
pat_id: ast::NodeId,
val: MatchInput,
offset_left: usize,
offset_right: usize) -> ValueRef {
let _icx = push_ctxt("match::bind_subslice_pat");
let vec_ty = node_id_type(bcx, pat_id);
let vec_ty_contents = match vec_ty.sty {
ty::TyBox(ty) => ty,
ty::TyRef(_, mt) | ty::TyRawPtr(mt) => mt.ty,
_ => vec_ty
};
let unit_ty = vec_ty_contents.sequence_element_type(bcx.tcx());
let vec_datum = match_datum(val, vec_ty);
let (base, len) = vec_datum.get_vec_base_and_len(bcx);
let slice_begin = InBoundsGEP(bcx, base, &[C_uint(bcx.ccx(), offset_left)]);
let diff = offset_left + offset_right;
if let ty::TyArray(ty, n) = vec_ty_contents.sty {
let array_ty = bcx.tcx().mk_array(ty, n-diff);
let llty_array = type_of::type_of(bcx.ccx(), array_ty);
return PointerCast(bcx, slice_begin, llty_array.ptr_to());
}
let slice_len_offset = C_uint(bcx.ccx(), diff);
let slice_len = Sub(bcx, len, slice_len_offset, DebugLoc::None);
let slice_ty = bcx.tcx().mk_imm_ref(bcx.tcx().mk_region(ty::ReErased),
bcx.tcx().mk_slice(unit_ty));
let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
Store(bcx, slice_begin, expr::get_dataptr(bcx, scratch.val));
Store(bcx, slice_len, expr::get_meta(bcx, scratch.val));
scratch.val
}
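// Returns pointers to the `before` leading and `after` trailing elements of a
// vector value; the trailing pointers are computed by indexing backwards from
// the loaded length.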
fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
left_ty: Ty<'tcx>,
before: usize,
after: usize,
val: MatchInput)
-> ExtractedBlock<'blk, 'tcx> {
let _icx = push_ctxt("match::extract_vec_elems");
let vec_datum = match_datum(val, left_ty);
let (base, len) = vec_datum.get_vec_base_and_len(bcx);
let mut elems = vec![];
elems.extend((0..before).map(|i| GEPi(bcx, base, &[i])));
elems.extend((0..after).rev().map(|i| {
InBoundsGEP(bcx, base, &[
Sub(bcx, len, C_uint(bcx.ccx(), i + 1), DebugLoc::None)
])
}));
ExtractedBlock { vals: elems, bcx: bcx }
}
// Macro for deciding whether any of the remaining matches fit a given kind of
// pattern. Note that, because the macro is well-typed, either ALL of the
// matches should fit that sort of pattern or NONE (however, some of the
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat {
($m:expr, $col:expr, $pattern:pat) => (
($m).iter().any(|br| {
match br.pats[$col].node {
$pattern => true,
_ => false
}
})
)
}
fn any_uniq_pat(m: &[Match], col: usize) -> bool {
any_pat!(m, col, PatKind::Box(_))
}
fn any_region_pat(m: &[Match], col: usize) -> bool {
any_pat!(m, col, PatKind::Ref(..))
}
fn any_irrefutable_adt_pat(tcx: TyCtxt, m: &[Match], col: usize) -> bool {
m.iter().any(|br| {
let pat = br.pats[col];
match pat.node {
PatKind::Tuple(..) => true,
PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) => {
match tcx.expect_def(pat.id) {
Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => true,
_ => false,
}
}
_ => false
}
})
}
/// What to do when the pattern match fails.
enum FailureHandler {
Infallible,
JumpToBasicBlock(BasicBlockRef),
Unreachable
}
impl FailureHandler {
fn is_fallible(&self) -> bool {
match *self {
Infallible => false,
_ => true
}
}
fn is_infallible(&self) -> bool {
!self.is_fallible()
}
fn handle_fail(&self, bcx: Block) {
match *self {
Infallible =>
bug!("attempted to panic in a non-panicking panic handler!"),
JumpToBasicBlock(basic_block) =>
Br(bcx, basic_block, DebugLoc::None),
Unreachable =>
build::Unreachable(bcx)
}
}
}
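// Chooses which pattern column to specialize on next: columns are scored by
// how many refutable patterns they contain, and a column consisting solely of
// irrefutable patterns scores usize::MAX so that it is peeled off first.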
fn pick_column_to_specialize(def_map: &RefCell<DefMap>, m: &[Match]) -> Option<usize> {
fn pat_score(def_map: &RefCell<DefMap>, pat: &hir::Pat) -> usize {
match pat.node {
PatKind::Binding(_, _, Some(ref inner)) => pat_score(def_map, &inner),
_ if pat_is_refutable(&def_map.borrow(), pat) => 1,
_ => 0
}
}
let column_score = |m: &[Match], col: usize| -> usize {
let total_score = m.iter()
.map(|row| row.pats[col])
.map(|pat| pat_score(def_map, pat))
.sum();
// Irrefutable columns always go first, they'd only be duplicated in the branches.
if total_score == 0 {
std::usize::MAX
} else {
total_score
}
};
let column_contains_any_nonwild_patterns = |&col: &usize| -> bool {
m.iter().any(|row| match row.pats[col].node {
PatKind::Wild => false,
_ => true
})
};
(0..m[0].pats.len())
.filter(column_contains_any_nonwild_patterns)
.map(|col| (col, column_score(m, col)))
.max_by_key(|&(_, score)| score)
.map(|(col, _)| col)
}
// Compiles a comparison between two things.
fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
lhs: ValueRef,
rhs: ValueRef,
rhs_t: Ty<'tcx>,
debug_loc: DebugLoc)
-> Result<'blk, 'tcx> {
fn compare_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lhs_data: ValueRef,
lhs_len: ValueRef,
rhs_data: ValueRef,
rhs_len: ValueRef,
rhs_t: Ty<'tcx>,
debug_loc: DebugLoc)
-> Result<'blk, 'tcx> {
let did = langcall(bcx.tcx(),
None,
&format!("comparison of `{}`", rhs_t),
StrEqFnLangItem);
let args = [lhs_data, lhs_len, rhs_data, rhs_len];
Callee::def(bcx.ccx(), did, bcx.tcx().mk_substs(Substs::empty()))
.call(bcx, debug_loc, ArgVals(&args), None)
}
let _icx = push_ctxt("compare_values");
if rhs_t.is_scalar() {
let cmp = compare_scalar_types(cx, lhs, rhs, rhs_t, hir::BiEq, debug_loc);
return Result::new(cx, cmp);
}
match rhs_t.sty {
ty::TyRef(_, mt) => match mt.ty.sty {
ty::TyStr => {
let lhs_data = Load(cx, expr::get_dataptr(cx, lhs));
let lhs_len = Load(cx, expr::get_meta(cx, lhs));
let rhs_data = Load(cx, expr::get_dataptr(cx, rhs));
let rhs_len = Load(cx, expr::get_meta(cx, rhs));
compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
}
ty::TyArray(ty, _) | ty::TySlice(ty) => match ty.sty {
ty::TyUint(ast::UintTy::U8) => {
// NOTE: cast &[u8] and &[u8; N] to &str and abuse the str_eq lang item,
// which calls memcmp().
let pat_len = val_ty(rhs).element_type().array_length();
let ty_str_slice = cx.tcx().mk_static_str();
let rhs_data = GEPi(cx, rhs, &[0, 0]);
let rhs_len = C_uint(cx.ccx(), pat_len);
let lhs_data;
let lhs_len;
if val_ty(lhs) == val_ty(rhs) {
// Both the discriminant and the pattern are thin pointers
lhs_data = GEPi(cx, lhs, &[0, 0]);
lhs_len = C_uint(cx.ccx(), pat_len);
} else {
// The discriminant is a fat pointer
let llty_str_slice = type_of::type_of(cx.ccx(), ty_str_slice).ptr_to();
let lhs_str = PointerCast(cx, lhs, llty_str_slice);
lhs_data = Load(cx, expr::get_dataptr(cx, lhs_str));
lhs_len = Load(cx, expr::get_meta(cx, lhs_str));
}
compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
},
_ => bug!("only byte strings supported in compare_values"),
},
_ => bug!("only string and byte strings supported in compare_values"),
},
_ => bug!("only scalars, byte strings, and strings supported in compare_values"),
}
}
/// For each binding in `data.bindings_map`, adds an appropriate entry into the `fcx.lllocals` map
fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
bindings_map: &BindingsMap<'tcx>,
cs: Option<cleanup::ScopeId>)
-> Block<'blk, 'tcx> {
for (&name, &binding_info) in bindings_map {
let (llval, aliases_other_state) = match binding_info.trmode {
// By value mut binding for a copy type: load from the ptr
// into the matched value and copy to our alloca
TrByCopy(llbinding) |
TrByMoveIntoCopy(llbinding) => {
let llval = Load(bcx, binding_info.llmatch);
let lvalue = match binding_info.trmode {
TrByCopy(..) =>
Lvalue::new("_match::insert_lllocals"),
TrByMoveIntoCopy(..) => {
// match_input moves from the input into a
// separate stack slot.
//
// E.g. consider moving the value `D(A)` out
// of the tuple `(D(A), D(B))` and into the
// local variable `x` via the pattern `(x,_)`,
// leaving the remainder of the tuple `(_,
// D(B))` still to be dropped in the future.
//
// Thus, here we must zero the place that we
// are moving *from*, because we do not yet
// track drop flags for a fragmented parent
// match input expression.
//
// Longer term we will be able to map the move
// into `(x, _)` up to the parent path that
// owns the whole tuple, and mark the
// corresponding stack-local drop-flag
// tracking the first component of the tuple.
let hint_kind = HintKind::ZeroAndMaintain;
Lvalue::new_with_hint("_match::insert_lllocals (match_input)",
bcx, binding_info.id, hint_kind)
}
_ => bug!(),
};
let datum = Datum::new(llval, binding_info.ty, lvalue);
call_lifetime_start(bcx, llbinding);
bcx = datum.store_to(bcx, llbinding);
if let Some(cs) = cs {
bcx.fcx.schedule_lifetime_end(cs, llbinding);
}
(llbinding, false)
},
// By value move bindings: load from the ptr into the matched value
TrByMoveRef => (Load(bcx, binding_info.llmatch), true),
// By ref binding: use the ptr into the matched value
TrByRef => (binding_info.llmatch, true),
};
// A local that aliases some other state must be zeroed, since
// the other state (e.g. some parent data that we matched
// into) will still have its subcomponents (such as this
// local) destructed at the end of the parent's scope. Longer
// term, we will properly map such parents to the set of
// unique drop flags for its fragments.
let hint_kind = if aliases_other_state {
HintKind::ZeroAndMaintain
} else {
HintKind::DontZeroJustUse
};
let lvalue = Lvalue::new_with_hint("_match::insert_lllocals (local)",
bcx,
binding_info.id,
hint_kind);
let datum = Datum::new(llval, binding_info.ty, lvalue);
if let Some(cs) = cs {
let opt_datum = lvalue.dropflag_hint(bcx);
bcx.fcx.schedule_lifetime_end(cs, binding_info.llmatch);
bcx.fcx.schedule_drop_and_fill_mem(cs, llval, binding_info.ty, opt_datum);
}
debug!("binding {} to {:?}", binding_info.id, Value(llval));
bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
debuginfo::create_match_binding_metadata(bcx, name, binding_info);
}
bcx
}
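// Compiles a match arm's guard expression: the arm's bindings are made visible
// in `lllocals` while the guard is evaluated to an i1, then removed again; if
// the guard fails we continue with the remaining rows, or jump straight to the
// next condition when only the default arm remains.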
fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
guard_expr: &hir::Expr,
data: &ArmData<'p, 'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
vals: &[MatchInput],
chk: &FailureHandler,
has_genuine_default: bool)
-> Block<'blk, 'tcx> {
debug!("compile_guard(bcx={}, guard_expr={:?}, m={:?}, vals={:?})",
bcx.to_str(), guard_expr, m, vals);
let _indenter = indenter();
let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);
let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
let val = val.to_llbool(bcx);
for (_, &binding_info) in &data.bindings_map {
if let Some(llbinding) = binding_info.trmode.alloca_if_copy() {
call_lifetime_end(bcx, llbinding)
}
}
for (_, &binding_info) in &data.bindings_map {
bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
}
with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
for (_, &binding_info) in &data.bindings_map {
call_lifetime_end(bcx, binding_info.llmatch);
}
match chk {
// If the default arm is the only one left, move on to the next
// condition explicitly rather than (possibly) falling back to
// the default arm.
&JumpToBasicBlock(_) if m.len() == 1 && has_genuine_default => {
chk.handle_fail(bcx);
}
_ => {
compile_submatch(bcx, m, vals, chk, has_genuine_default);
}
};
bcx
})
}
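// The heart of the decision-tree construction. Picks a column to specialize
// on, expanding any nested bindings first; once no testable column remains,
// the row's bound pointers are stored, an optional guard is compiled, and
// control branches to the arm body.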
fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
vals: &[MatchInput],
chk: &FailureHandler,
has_genuine_default: bool) {
debug!("compile_submatch(bcx={}, m={:?}, vals=[{:?}])",
bcx.to_str(), m, vals);
let _indenter = indenter();
let _icx = push_ctxt("match::compile_submatch");
let mut bcx = bcx;
if m.is_empty() {
if chk.is_fallible() {
chk.handle_fail(bcx);
}
return;
}
let tcx = bcx.tcx();
let def_map = &tcx.def_map;
match pick_column_to_specialize(def_map, m) {
Some(col) => {
let val = vals[col];
if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val);
compile_submatch_continue(bcx,
&expanded[..],
vals,
chk,
col,
val,
has_genuine_default)
} else {
compile_submatch_continue(bcx, m, vals, chk, col, val, has_genuine_default)
}
}
None => {
let data = &m[0].data;
for &(ref name, ref value_ptr) in &m[0].bound_ptrs {
let binfo = *data.bindings_map.get(name).unwrap();
call_lifetime_start(bcx, binfo.llmatch);
if binfo.trmode == TrByRef && type_is_fat_ptr(bcx.tcx(), binfo.ty) {
expr::copy_fat_ptr(bcx, *value_ptr, binfo.llmatch);
}
else {
Store(bcx, *value_ptr, binfo.llmatch);
}
}
match data.arm.guard {
Some(ref guard_expr) => {
bcx = compile_guard(bcx,
&guard_expr,
m[0].data,
&m[1..m.len()],
vals,
chk,
has_genuine_default);
}
_ => ()
}
Br(bcx, data.bodycx.llbb, DebugLoc::None);
}
}
}
fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
m: &[Match<'a, 'p, 'blk, 'tcx>],
vals: &[MatchInput],
chk: &FailureHandler,
col: usize,
val: MatchInput,
has_genuine_default: bool) {
let fcx = bcx.fcx;
let tcx = bcx.tcx();
let mut vals_left = vals[0..col].to_vec();
vals_left.extend_from_slice(&vals[col + 1..]);
let ccx = bcx.fcx.ccx;
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
let pat_id = m.iter().map(|br| br.pats[col].id)
.find(|&id| id != DUMMY_NODE_ID)
.unwrap_or(DUMMY_NODE_ID);
let left_ty = if pat_id == DUMMY_NODE_ID {
tcx.mk_nil()
} else {
node_id_type(bcx, pat_id)
};
let mcx = check_match::MatchCheckCtxt {
tcx: bcx.tcx(),
param_env: bcx.tcx().empty_parameter_environment(),
};
let adt_vals = if any_irrefutable_adt_pat(bcx.tcx(), m, col) {
let repr = adt::represent_type(bcx.ccx(), left_ty);
let arg_count = adt::num_args(&repr, Disr(0));
let (arg_count, struct_val) = if type_is_sized(bcx.tcx(), left_ty) {
(arg_count, val.val)
} else {
// For an unsized ADT (i.e. DST struct), we need to treat
// the last field specially: instead of simply passing a
// ValueRef pointing to that field, as with all the others,
// we skip it and instead construct a 'fat ptr' below.
(arg_count - 1, Load(bcx, expr::get_dataptr(bcx, val.val)))
};
let mut field_vals: Vec<ValueRef> = (0..arg_count).map(|ix|
// By definition, these are all sized
adt::trans_field_ptr(bcx, &repr, adt::MaybeSizedValue::sized(struct_val), Disr(0), ix)
).collect();
match left_ty.sty {
ty::TyStruct(def, substs) if !type_is_sized(bcx.tcx(), left_ty) => {
// The last field is technically unsized but
// since we can only ever match that field behind
// a reference we construct a fat ptr here.
let unsized_ty = def.struct_variant().fields.last().map(|field| {
monomorphize::field_ty(bcx.tcx(), substs, field)
}).unwrap();
let scratch = alloc_ty(bcx, unsized_ty, "__struct_field_fat_ptr");
let meta = Load(bcx, expr::get_meta(bcx, val.val));
let struct_val = adt::MaybeSizedValue::unsized_(struct_val, meta);
let data = adt::trans_field_ptr(bcx, &repr, struct_val, Disr(0), arg_count);
Store(bcx, data, expr::get_dataptr(bcx, scratch));
Store(bcx, meta, expr::get_meta(bcx, scratch));
field_vals.push(scratch);
}
_ => {}
}
Some(field_vals)
} else if any_uniq_pat(m, col) || any_region_pat(m, col) {
let ptr = if type_is_fat_ptr(bcx.tcx(), left_ty) {
val.val
} else {
Load(bcx, val.val)
};
Some(vec!(ptr))
} else {
match left_ty.sty {
ty::TyArray(_, n) => {
let args = extract_vec_elems(bcx, left_ty, n, 0, val);
Some(args.vals)
}
_ => None
}
};
match adt_vals {
Some(field_vals) => {
let pats = enter_match(bcx, m, col, val, |pats|
check_match::specialize(&mcx, pats,
&Constructor::Single, col,
field_vals.len())
);
let mut vals: Vec<_> = field_vals.into_iter()
.map(|v|MatchInput::from_val(v))
.collect();
vals.extend_from_slice(&vals_left);
compile_submatch(bcx, &pats, &vals, chk, has_genuine_default);
return;
}
_ => ()
}
// Decide what kind of branch we need
let opts = get_branches(bcx, m, col);
debug!("options={:?}", opts);
let mut kind = NoBranch;
let mut test_val = val.val;
debug!("test_val={:?}", Value(test_val));
if !opts.is_empty() {
match opts[0] {
ConstantValue(..) | ConstantRange(..) => {
test_val = load_if_immediate(bcx, val.val, left_ty);
kind = if left_ty.is_integral() {
Switch
} else {
Compare
};
}
Variant(_, ref repr, _, _) => {
let (the_kind, val_opt) = adt::trans_switch(bcx, &repr,
val.val, true);
kind = the_kind;
if let Some(tval) = val_opt { test_val = tval; }
}
SliceLengthEqual(..) | SliceLengthGreaterOrEqual(..) => {
let (_, len) = tvec::get_base_and_len(bcx, val.val, left_ty);
test_val = len;
kind = Switch;
}
}
}
for o in &opts {
match *o {
ConstantRange(..) => { kind = Compare; break },
SliceLengthGreaterOrEqual(..) => { kind = CompareSliceLength; break },
_ => ()
}
}
let else_cx = match kind {
NoBranch | Single => bcx,
_ => bcx.fcx.new_temp_block("match_else")
};
let sw = if kind == Switch {
build::Switch(bcx, test_val, else_cx.llbb, opts.len())
} else {
C_int(ccx, 0) // Placeholder for when not using a switch
};
let defaults = enter_default(else_cx, m, col, val);
let exhaustive = chk.is_infallible() && defaults.is_empty();
let len = opts.len();
if exhaustive && kind == Switch {
build::Unreachable(else_cx);
}
// Compile subtrees for each option
for (i, opt) in opts.iter().enumerate() {
// In some cases of range and vector pattern matching, we need to
// override the failure case so that instead of failing, it proceeds
// to try more matching. branch_chk, then, is the proper failure case
// for the current conditional branch.
let mut branch_chk = None;
let mut opt_cx = else_cx;
let debug_loc = opt.debug_loc();
if kind == Switch || !exhaustive || i + 1 < len {
opt_cx = bcx.fcx.new_temp_block("match_case");
match kind {
Single => Br(bcx, opt_cx.llbb, debug_loc),
Switch => {
match opt.trans(bcx) {
SingleResult(r) => {
AddCase(sw, r.val, opt_cx.llbb);
bcx = r.bcx;
}
_ => {
bug!(
"in compile_submatch, expected \
opt.trans() to return a SingleResult")
}
}
}
Compare | CompareSliceLength => {
let t = if kind == Compare {
left_ty
} else {
tcx.types.usize // vector length
};
let Result { bcx: after_cx, val: matches } = {
match opt.trans(bcx) {
SingleResult(Result { bcx, val }) => {
compare_values(bcx, test_val, val, t, debug_loc)
}
RangeResult(Result { val: vbegin, .. },
Result { bcx, val: vend }) => {
let llge = compare_scalar_types(bcx, test_val, vbegin,
t, hir::BiGe, debug_loc);
let llle = compare_scalar_types(bcx, test_val, vend,
t, hir::BiLe, debug_loc);
Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
}
LowerBound(Result { bcx, val }) => {
Result::new(bcx, compare_scalar_types(bcx, test_val,
val, t, hir::BiGe,
debug_loc))
}
}
};
bcx = fcx.new_temp_block("compare_next");
// If none of the sub-cases match, and the current condition
// is guarded or has multiple patterns, move on to the next
// condition, if there is any, rather than falling back to
// the default.
let guarded = m[i].data.arm.guard.is_some();
let multi_pats = m[i].pats.len() > 1;
if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
branch_chk = Some(JumpToBasicBlock(bcx.llbb));
}
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb, debug_loc);
}
_ => ()
}
} else if kind == Compare || kind == CompareSliceLength {
Br(bcx, else_cx.llbb, debug_loc);
}
let mut size = 0;
let mut unpacked = Vec::new();
match *opt {
Variant(disr_val, ref repr, _, _) => {
let ExtractedBlock {vals: argvals, bcx: new_bcx} =
extract_variant_args(opt_cx, &repr, disr_val, val);
size = argvals.len();
unpacked = argvals;
opt_cx = new_bcx;
}
SliceLengthEqual(len, _) => {
let args = extract_vec_elems(opt_cx, left_ty, len, 0, val);
size = args.vals.len();
unpacked = args.vals.clone();
opt_cx = args.bcx;
}
SliceLengthGreaterOrEqual(before, after, _) => {
let args = extract_vec_elems(opt_cx, left_ty, before, after, val);
size = args.vals.len();
unpacked = args.vals.clone();
opt_cx = args.bcx;
}
ConstantValue(..) | ConstantRange(..) => ()
}
let opt_ms = enter_opt(opt_cx, pat_id, m, opt, col, size, val);
let mut opt_vals: Vec<_> = unpacked.into_iter()
.map(|v|MatchInput::from_val(v))
.collect();
opt_vals.extend_from_slice(&vals_left[..]);
compile_submatch(opt_cx,
&opt_ms[..],
&opt_vals[..],
branch_chk.as_ref().unwrap_or(chk),
has_genuine_default);
}
// Compile the fall-through case, if any
if !exhaustive && kind != Single {
if kind == Compare || kind == CompareSliceLength {
Br(bcx, else_cx.llbb, DebugLoc::None);
}
match chk {
// If there is only one default arm left, move on to the next
// condition explicitly rather than (eventually) falling back to
// the last default arm.
&JumpToBasicBlock(_) if defaults.len() == 1 && has_genuine_default => {
chk.handle_fail(else_cx);
}
_ => {
compile_submatch(else_cx,
&defaults[..],
&vals_left[..],
chk,
has_genuine_default);
}
}
}
}
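/// Translates a `match` expression; thin public wrapper around
/// `trans_match_inner`.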
pub fn trans_match<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match_expr: &hir::Expr,
discr_expr: &hir::Expr,
arms: &[hir::Arm],
dest: Dest)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("match::trans_match");
trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}
/// Checks whether the binding in `discr` is assigned to anywhere in the expression `body`
fn is_discr_reassigned(bcx: Block, discr: &hir::Expr, body: &hir::Expr) -> bool {
let (vid, field) = match discr.node {
hir::ExprPath(..) => match bcx.tcx().expect_def(discr.id) {
Def::Local(_, vid) | Def::Upvar(_, vid, _, _) => (vid, None),
_ => return false
},
hir::ExprField(ref base, field) => {
let vid = match bcx.tcx().expect_def_or_none(base.id) {
Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
_ => return false
};
(vid, Some(mc::NamedField(field.node)))
},
hir::ExprTupField(ref base, field) => {
let vid = match bcx.tcx().expect_def_or_none(base.id) {
Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
_ => return false
};
(vid, Some(mc::PositionalField(field.node)))
},
_ => return false
};
let mut rc = ReassignmentChecker {
node: vid,
field: field,
reassigned: false
};
bcx.tcx().normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
let mut visitor = euv::ExprUseVisitor::new(&mut rc, &infcx);
visitor.walk_expr(body);
});
rc.reassigned
}
struct ReassignmentChecker {
node: ast::NodeId,
field: Option<mc::FieldName>,
reassigned: bool
}
// Determine if the expression we're matching on is reassigned to within
// the body of the match's arm.
// We only care for the `mutate` callback since this check only matters
// for cases where the matched value is moved.
impl<'tcx> euv::Delegate<'tcx> for ReassignmentChecker {
fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: euv::ConsumeMode) {}
fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::MatchMode) {}
fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::ConsumeMode) {}
fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
_: ty::BorrowKind, _: euv::LoanCause) {}
fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
let cmt_id = |cmt: &mc::cmt| match cmt.cat {
Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, ..}, ..}) |
Categorization::Local(vid) => Some(vid),
Categorization::Interior(ref base_cmt, mc::InteriorField(_)) => Some(base_cmt.id),
_ => None
};
match cmt.cat {
Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
Categorization::Local(vid) => self.reassigned |= self.node == vid,
ref cat => {
let mut cat = cat;
while let &Categorization::Interior(ref base_cmt, mc::InteriorField(field)) = cat {
if let Some(vid) = cmt_id(base_cmt) {
if self.node == vid && (self.field.is_none() || self.field == Some(field)) {
self.reassigned = true;
return;
}
}
cat = &base_cmt.cat;
}
}
}
}
}
fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
discr: &hir::Expr, body: &hir::Expr)
-> BindingsMap<'tcx> {
// Create the bindings map, which is a mapping from each binding name
// to an alloca() that will be the value for that local variable.
// Note that we use the names because each binding will have many ids
// from the various alternatives.
let ccx = bcx.ccx();
let reassigned = is_discr_reassigned(bcx, discr, body);
let mut bindings_map = FnvHashMap();
pat_bindings(&pat, |bm, p_id, span, path1| {
let name = path1.node;
let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty);
let tcx = bcx.tcx();
let param_env = tcx.empty_parameter_environment();
let llmatch;
let trmode;
let moves_by_default = variable_ty.moves_by_default(tcx, ¶m_env, span);
match bm {
hir::BindByValue(_) if !moves_by_default || reassigned =>
{
llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
let llcopy = alloca(bcx, llvariable_ty, &bcx.name(name));
trmode = if moves_by_default {
TrByMoveIntoCopy(llcopy)
} else {
TrByCopy(llcopy)
};
}
hir::BindByValue(_) => {
// in this case, the final type of the variable will be T,
// but during matching we need to store a *T as explained
// above
llmatch = alloca(bcx, llvariable_ty.ptr_to(), &bcx.name(name));
trmode = TrByMoveRef;
}
hir::BindByRef(_) => {
llmatch = alloca(bcx, llvariable_ty, &bcx.name(name));
trmode = TrByRef;
}
};
bindings_map.insert(name, BindingInfo {
llmatch: llmatch,
trmode: trmode,
id: p_id,
span: span,
ty: variable_ty
});
});
return bindings_map;
}
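// Builds one `Match` row per (arm, pattern) pair with constants inlined,
// compiles the decision tree against the discriminant, then translates each
// arm body and joins the resulting blocks.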
fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
match_id: ast::NodeId,
discr_expr: &hir::Expr,
arms: &[hir::Arm],
dest: Dest) -> Block<'blk, 'tcx> {
let _icx = push_ctxt("match::trans_match_inner");
let fcx = scope_cx.fcx;
let mut bcx = scope_cx;
let tcx = bcx.tcx();
let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
"match"));
if bcx.unreachable.get() {
return bcx;
}
let t = node_id_type(bcx, discr_expr.id);
let chk = if t.is_empty(tcx) {
Unreachable
} else {
Infallible
};
let arm_datas: Vec<ArmData> = arms.iter().map(|arm| ArmData {
bodycx: fcx.new_id_block("case_body", arm.body.id),
arm: arm,
bindings_map: create_bindings_map(bcx, &arm.pats[0], discr_expr, &arm.body)
}).collect();
let mut pat_renaming_map = if scope_cx.sess().opts.debuginfo != NoDebugInfo {
Some(FnvHashMap())
} else {
None
};
let arm_pats: Vec<Vec<P<hir::Pat>>> = {
let mut static_inliner = StaticInliner::new(scope_cx.tcx(),
pat_renaming_map.as_mut());
arm_datas.iter().map(|arm_data| {
arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect()
}).collect()
};
let mut matches = Vec::new();
for (arm_data, pats) in arm_datas.iter().zip(&arm_pats) {
matches.extend(pats.iter().map(|p| Match {
pats: vec![&p],
data: arm_data,
bound_ptrs: Vec::new(),
pat_renaming_map: pat_renaming_map.as_ref()
}));
}
// `compile_submatch` works one column of arm patterns a time and
// then peels that column off. So as we progress, it may become
// impossible to tell whether we have a genuine default arm, i.e.
// `_ => foo` or not. Sometimes it is important to know that in order
// to decide whether moving on to the next condition or falling back
// to the default arm.
let has_default = arms.last().map_or(false, |arm| {
arm.pats.len() == 1
&& arm.pats.last().unwrap().node == PatKind::Wild
});
compile_submatch(bcx, &matches[..], &[discr_datum.match_input()], &chk, has_default);
let mut arm_cxs = Vec::new();
for arm_data in &arm_datas {
let mut bcx = arm_data.bodycx;
// insert bindings into the lllocals map and add cleanups
let cs = fcx.push_custom_cleanup_scope();
bcx = insert_lllocals(bcx, &arm_data.bindings_map, Some(cleanup::CustomScope(cs)));
bcx = expr::trans_into(bcx, &arm_data.arm.body, dest);
bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cs);
arm_cxs.push(bcx);
}
bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
return bcx;
}
/// Generates code for a local variable declaration like `let <pat>;` or `let <pat> =
/// <opt_init_expr>`.
pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
local: &hir::Local)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("match::store_local");
let mut bcx = bcx;
let tcx = bcx.tcx();
let pat = &local.pat;
fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
pat: &hir::Pat)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("create_dummy_locals");
// create dummy memory for the variables if we have no
// value to store into them immediately
let tcx = bcx.tcx();
pat_bindings(pat, |_, p_id, _, path1| {
let scope = cleanup::var_scope(tcx, p_id);
bcx = mk_binding_alloca(
bcx, p_id, path1.node, scope, (),
"_match::store_local::create_dummy_locals",
|(), bcx, Datum { val: llval, ty, kind }| {
// Dummy-locals start out uninitialized, so set their
// drop-flag hints (if any) to "moved."
if let Some(hint) = kind.dropflag_hint(bcx) {
let moved_hint = adt::DTOR_MOVED_HINT;
debug!("store moved_hint={} for hint={:?}, uninitialized dummy",
moved_hint, hint);
Store(bcx, C_u8(bcx.fcx.ccx, moved_hint), hint.to_value().value());
}
if kind.drop_flag_info.must_zero() {
// if no drop-flag hint, or the hint requires
// we maintain the embedded drop-flag, then
// mark embedded drop-flag(s) as moved
// (i.e. "already dropped").
drop_done_fill_mem(bcx, llval, ty);
}
bcx
});
});
bcx
}
match local.init {
Some(ref init_expr) => {
// Optimize the "let x = expr" case. This just writes
// the result of evaluating `expr` directly into the alloca
// for `x`. Often the general path results in similar or the
// same code post-optimization, but not always. In particular,
// in unsafe code, you can have expressions like
//
// let x = intrinsics::uninit();
//
// In such cases, the more general path is unsafe, because
// it assumes it is matching against a valid value.
if let Some(name) = simple_name(pat) {
let var_scope = cleanup::var_scope(tcx, local.id);
return mk_binding_alloca(
bcx, pat.id, name, var_scope, (),
"_match::store_local",
|(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &init_expr,
expr::SaveIn(v)));
}
// General path.
let init_datum =
unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &init_expr, "let"));
if bcx.sess().asm_comments() {
add_comment(bcx, "creating zeroable ref llval");
}
let var_scope = cleanup::var_scope(tcx, local.id);
bind_irrefutable_pat(bcx, pat, init_datum.match_input(), var_scope)
}
None => {
create_dummy_locals(bcx, pat)
}
}
}
fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
p_id: ast::NodeId,
name: ast::Name,
cleanup_scope: cleanup::ScopeId,
arg: A,
caller_name: &'static str,
populate: F)
-> Block<'blk, 'tcx> where
F: FnOnce(A, Block<'blk, 'tcx>, Datum<'tcx, Lvalue>) -> Block<'blk, 'tcx>,
{
let var_ty = node_id_type(bcx, p_id);
// Allocate memory on stack for the binding.
let llval = alloc_ty(bcx, var_ty, &bcx.name(name));
let lvalue = Lvalue::new_with_hint(caller_name, bcx, p_id, HintKind::DontZeroJustUse);
let datum = Datum::new(llval, var_ty, lvalue);
debug!("mk_binding_alloca cleanup_scope={:?} llval={:?} var_ty={:?}",
cleanup_scope, Value(llval), var_ty);
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
call_lifetime_start(bcx, llval);
let bcx = populate(arg, bcx, datum);
bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty, lvalue.dropflag_hint(bcx));
// Now that memory is initialized and has cleanup scheduled,
// insert datum into the local variable map.
bcx.fcx.lllocals.borrow_mut().insert(p_id, datum);
bcx
}
/// A simple version of the pattern matching code that only handles
/// irrefutable patterns. This is used in let/argument patterns,
/// not in match statements. Unifying this code with the code above
/// sounds nice, but in practice it produces very inefficient code,
/// since the match code is so much more general. In most cases,
/// LLVM is able to optimize the code, but it causes longer compile
/// times and makes the generated code nigh impossible to read.
///
/// # Arguments
/// - bcx: starting basic block context
/// - pat: the irrefutable pattern being matched.
/// - val: the value being matched -- must be an lvalue (by ref, with cleanup)
pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pat: &hir::Pat,
val: MatchInput,
cleanup_scope: cleanup::ScopeId)
-> Block<'blk, 'tcx> {
debug!("bind_irrefutable_pat(bcx={}, pat={:?}, val={:?})",
bcx.to_str(), pat, val);
if bcx.sess().asm_comments() {
add_comment(bcx, &format!("bind_irrefutable_pat(pat={:?})",
pat));
}
let _indenter = indenter();
let _icx = push_ctxt("match::bind_irrefutable_pat");
let mut bcx = bcx;
let tcx = bcx.tcx();
let ccx = bcx.ccx();
match pat.node {
PatKind::Binding(pat_binding_mode, ref path1, ref inner) => {
// Allocate the stack slot where the value of this
// binding will live and place it into the appropriate
// map.
bcx = mk_binding_alloca(bcx, pat.id, path1.node, cleanup_scope, (),
"_match::bind_irrefutable_pat",
|(), bcx, Datum { val: llval, ty, kind: _ }| {
match pat_binding_mode {
hir::BindByValue(_) => {
// By value binding: move the value that `val`
// points at into the binding's stack slot.
let d = val.to_datum(ty);
d.store_to(bcx, llval)
}
hir::BindByRef(_) => {
// By ref binding: the value of the variable
// is the pointer `val` itself or fat pointer referenced by `val`
if type_is_fat_ptr(bcx.tcx(), ty) {
expr::copy_fat_ptr(bcx, val.val, llval);
}
else {
Store(bcx, val.val, llval);
}
bcx
}
}
});
if let Some(ref inner_pat) = *inner {
bcx = bind_irrefutable_pat(bcx, &inner_pat, val, cleanup_scope);
}
}
PatKind::TupleStruct(_, ref sub_pats, ddpos) => {
match bcx.tcx().expect_def(pat.id) {
Def::Variant(enum_id, var_id) => {
let repr = adt::represent_node(bcx, pat.id);
let vinfo = ccx.tcx().lookup_adt_def(enum_id).variant_with_id(var_id);
let args = extract_variant_args(bcx,
&repr,
Disr::from(vinfo.disr_val),
val);
for (i, subpat) in sub_pats.iter()
.enumerate_and_adjust(vinfo.fields.len(), ddpos) {
bcx = bind_irrefutable_pat(
bcx,
subpat,
MatchInput::from_val(args.vals[i]),
cleanup_scope);
}
}
Def::Struct(..) => {
let expected_len = match *ccx.tcx().pat_ty(&pat) {
ty::TyS{sty: ty::TyStruct(adt_def, _), ..} => {
adt_def.struct_variant().fields.len()
}
ref ty => {
span_bug!(pat.span, "tuple struct pattern unexpected type {:?}", ty);
}
};
let repr = adt::represent_node(bcx, pat.id);
let val = adt::MaybeSizedValue::sized(val.val);
for (i, elem) in sub_pats.iter().enumerate_and_adjust(expected_len, ddpos) {
let fldptr = adt::trans_field_ptr(bcx, &repr, val, Disr(0), i);
bcx = bind_irrefutable_pat(
bcx,
&elem,
MatchInput::from_val(fldptr),
cleanup_scope);
}
}
_ => {
// Nothing to do here.
}
}
}
PatKind::Struct(_, ref fields, _) => {
let tcx = bcx.tcx();
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
let pat_v = VariantInfo::of_node(tcx, pat_ty, pat.id);
let val = if type_is_sized(tcx, pat_ty) {
adt::MaybeSizedValue::sized(val.val)
} else {
let data = Load(bcx, expr::get_dataptr(bcx, val.val));
let meta = Load(bcx, expr::get_meta(bcx, val.val));
adt::MaybeSizedValue::unsized_(data, meta)
};
for f in fields {
let name = f.node.name;
let field_idx = pat_v.field_index(name);
let mut fldptr = adt::trans_field_ptr(
bcx,
&pat_repr,
val,
pat_v.discr,
field_idx);
let fty = pat_v.fields[field_idx].1;
// If it's not sized, then construct a fat pointer instead of
// a regular one
if !type_is_sized(tcx, fty) {
let scratch = alloc_ty(bcx, fty, "__struct_field_fat_ptr");
debug!("Creating fat pointer {:?}", Value(scratch));
Store(bcx, fldptr, expr::get_dataptr(bcx, scratch));
Store(bcx, val.meta, expr::get_meta(bcx, scratch));
fldptr = scratch;
}
bcx = bind_irrefutable_pat(bcx,
&f.node.pat,
MatchInput::from_val(fldptr),
cleanup_scope);
}
}
PatKind::Tuple(ref elems, ddpos) => {
match tcx.node_id_to_type(pat.id).sty {
ty::TyTuple(ref tys) => {
let repr = adt::represent_node(bcx, pat.id);
let val = adt::MaybeSizedValue::sized(val.val);
for (i, elem) in elems.iter().enumerate_and_adjust(tys.len(), ddpos) {
let fldptr = adt::trans_field_ptr(bcx, &repr, val, Disr(0), i);
bcx = bind_irrefutable_pat(
bcx,
&elem,
MatchInput::from_val(fldptr),
cleanup_scope);
}
}
ref sty => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", sty),
}
}
PatKind::Box(ref inner) => {
let pat_ty = node_id_type(bcx, inner.id);
// Pass along DSTs as fat pointers.
let val = if type_is_fat_ptr(tcx, pat_ty) {
// We need to check for this, as the pattern could be binding
// a fat pointer by-value.
if let PatKind::Binding(hir::BindByRef(..),_,_) = inner.node {
val.val
} else {
Load(bcx, val.val)
}
} else if type_is_sized(tcx, pat_ty) {
Load(bcx, val.val)
} else {
val.val
};
bcx = bind_irrefutable_pat(
bcx, &inner, MatchInput::from_val(val), cleanup_scope);
}
PatKind::Ref(ref inner, _) => {
let pat_ty = node_id_type(bcx, inner.id);
// Pass along DSTs as fat pointers.
let val = if type_is_fat_ptr(tcx, pat_ty) {
// We need to check for this, as the pattern could be binding
// a fat pointer by-value.
if let PatKind::Binding(hir::BindByRef(..),_,_) = inner.node {
val.val
} else {
Load(bcx, val.val)
}
} else if type_is_sized(tcx, pat_ty) {
Load(bcx, val.val)
} else {
val.val
};
bcx = bind_irrefutable_pat(
bcx,
&inner,
MatchInput::from_val(val),
cleanup_scope);
}
PatKind::Vec(ref before, ref slice, ref after) => {
let pat_ty = node_id_type(bcx, pat.id);
let mut extracted = extract_vec_elems(bcx, pat_ty, before.len(), after.len(), val);
match slice {
&Some(_) => {
extracted.vals.insert(
before.len(),
bind_subslice_pat(bcx, pat.id, val, before.len(), after.len())
);
}
&None => ()
}
bcx = before
.iter()
.chain(slice.iter())
.chain(after.iter())
.zip(extracted.vals)
.fold(bcx, |bcx, (inner, elem)| {
bind_irrefutable_pat(
bcx,
&inner,
MatchInput::from_val(elem),
cleanup_scope)
});
}
PatKind::Path(..) | PatKind::Wild |
PatKind::Lit(..) | PatKind::Range(..) => ()
}
return bcx;
}
RemoveButton.js
import React from 'react';
import IconButton from 'material-ui/IconButton';
import Clear from 'material-ui/svg-icons/content/clear';
import './RemoveButton.scss';
const RemoveButton = ({removePlayer, position}) => (
<div className="remove-btn"
onClick={() => removePlayer(position)}
>
<Clear />
</div>
);
export default RemoveButton;
api_op_GetDocumentVersion.go
// Code generated by smithy-go-codegen DO NOT EDIT.
package workdocs
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/workdocs/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Retrieves version metadata for the specified document.
func (c *Client) GetDocumentVersion(ctx context.Context, params *GetDocumentVersionInput, optFns ...func(*Options)) (*GetDocumentVersionOutput, error) {
if params == nil {
params = &GetDocumentVersionInput{}
}
result, metadata, err := c.invokeOperation(ctx, "GetDocumentVersion", params, optFns, c.addOperationGetDocumentVersionMiddlewares)
if err != nil {
return nil, err
}
out := result.(*GetDocumentVersionOutput)
out.ResultMetadata = metadata
return out, nil
}
type GetDocumentVersionInput struct {
// The ID of the document.
//
// This member is required.
DocumentId *string
// The version ID of the document.
//
// This member is required.
VersionId *string
// Amazon WorkDocs authentication token. Not required when using AWS administrator
// credentials to access the API.
AuthenticationToken *string
// A comma-separated list of values. Specify "SOURCE" to include a URL for the
// source document.
Fields *string
// Set this to TRUE to include custom metadata in the response.
IncludeCustomMetadata bool
}
type GetDocumentVersionOutput struct {
// The custom metadata on the document version.
CustomMetadata map[string]string
// The version metadata.
Metadata *types.DocumentVersionMetadata
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func (c *Client) addOperationGetDocumentVersionMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsRestjson1_serializeOpGetDocumentVersion{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsRestjson1_deserializeOpGetDocumentVersion{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpGetDocumentVersionValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opGetDocumentVersion(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
func newServiceMetadataMiddleware_opGetDocumentVersion(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "workdocs",
OperationName: "GetDocumentVersion",
}
}
donate.rs
use super::prelude::*;
interaction_setup! {
name = "donate",
group = "utility",
description = "Support me peko!"
}
#[interaction_cmd]
pub async fn donate(
ctx: &Ctx,
interaction: &ApplicationCommandInteraction,
config: &Config,
) -> anyhow::Result<()> {
interaction
.create_interaction_response(&ctx.http, |r| {
r.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.flags(InteractionApplicationCommandCallbackDataFlags::EPHEMERAL)
.create_embed(|e| e
.title("Donation Information")
.colour(Colour::from_rgb(0xEC, 0x9C, 0xFC))
.description(
"*Almondo, almondo peko!*\n\n\
If you are interested in helping support my development, \
and invest in better hosting, we'd appreciate your support peko!\n\n\
Any amount is appreciated, and all donations will go directly towards development \
and new hardware peko!")
.field(
"Links",
"Donations can be made via either [GitHub Sponsors](https://github.com/sponsors/anden3) \
or [Ko-Fi](https://ko-fi.com/anden3) peko! \
Any amount is greatly appreciated peko!", false)
.field(
"Disclaimer",
"No donations will ever be required to access any features of the bot, \
so if you feel like you can't spare some extra money, please save it for yourself peko. \
Additionally, please consider that all donations are non-refundable peko.",
false)
.footer(|f| f.text("I am made by anden3#0003 peko!"))
)
})
})
.await
.context(here!())?;
Ok(())
}
|
basic_ref_types.rs
#![allow(improper_ctypes)]
use marine_rs_sdk::marine;
fn main() {}
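// Imports from the `arguments_passing_effector` module; every function takes
// its arguments by reference to exercise reference-type passing.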
#[marine]
#[link(wasm_import_module = "arguments_passing_effector")]
extern "C" {
pub fn all_ref_types(
arg_0: &i8,
arg_1: &i16,
arg_2: &i32,
arg_3: &i64,
arg_4: &u8,
arg_5: &u16,
arg_6: &u32,
arg_7: &u64,
arg_8: &f32,
arg_9: &f64,
arg_10: &String,
arg_11: &Vec<u8>,
) -> Vec<u8>;
pub fn string_ref_type(arg: &String) -> String;
pub fn str_type(arg: &str) -> String;
pub fn bytearray_ref_type(arg: &Vec<u8>) -> Vec<u8>;
pub fn bool_ref_type(arg: &bool) -> bool;
pub fn f32_ref_type(arg: &f32) -> f32;
pub fn f64_ref_type(arg: &f64) -> f64;
pub fn u32_ref_type(arg: &u32) -> u32;
pub fn u64_ref_type(arg: &u64) -> u64;
pub fn i32_ref_type(arg: &i32) -> i32;
pub fn i64_ref_type(arg: &i64) -> i64;
}
utils.py
from datetime import timedelta, date
def req_date(local_date):
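"""Convert a 'DD.MM.YYYY' string or datetime.date to 'YYYY-MM-DD'; other values are returned unchanged."""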
if isinstance(local_date, str):
d, m, y = local_date.split('.')
return '{0}-{1}-{2}'.format(y, m, d)
elif isinstance(local_date, date):
return local_date.strftime('%Y-%m-%d')
else:
return local_date
def req_timedelta(arg):
if isinstance(arg, timedelta):
return arg
else:
if isinstance(arg, str):
parts = arg.split(':')
try:
res = timedelta(hours=int(parts[0]), minutes=int(parts[1]))
except ValueError:
res = timedelta(0)
return res
else:
return timedelta(0)
def yesterday_local():
return (date.today() - timedelta(days=1)).strftime("%d.%m.%Y")
def stat_timedelta_for_report(time_delta, round_to_hour=True):
if time_delta:
sec = time_delta.total_seconds()
hours, remainder = divmod(sec, 3600)
if round_to_hour:
if remainder >= 1800:
hours += 1
return str(int(hours))
minutes, remainder = divmod(remainder, 60)
return "{0:,d}:{1:02}".format(int(hours), int(minutes)).replace(',',' ')
else:
return '-'
def custom_redirect(url_name, *args, **kwargs):
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.http import urlencode
url = reverse(url_name, args=args)
params = urlencode(kwargs)
return HttpResponseRedirect(url + "?%s" % params)
error.go
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runner
import (
"fmt"
"os"
"github.com/dimandzhi/etcd/client"
)
const (
// http://tldp.org/LDP/abs/html/exitcodes.html
ExitSuccess = iota
ExitError
ExitBadConnection
ExitInvalidInput // for txn, watch command
ExitBadFeature // provided a valid flag with an unsupported value
ExitInterrupted
ExitIO
ExitBadArgs = 128
)
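// ExitWithError writes err (plus cluster error details, when available) to
// stderr and terminates the process with the given exit code.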
func ExitWithError(code int, err error) {
fmt.Fprintln(os.Stderr, "Error: ", err)
if cerr, ok := err.(*client.ClusterError); ok {
fmt.Fprintln(os.Stderr, cerr.Detail())
}
os.Exit(code)
}
getAUsersVideos.py
from TikTokApi import TikTokApi
# Starts TikTokApi
api = TikTokApi.get_instance()
# The number of TikToks you want returned
results = 10
# Returns a list of dictionaries of the user's posts
userPosts = api.userPosts(
"6745191554350760966", | )
# Loops over every tiktok
for tiktok in userPosts:
# Prints the text of the tiktok
print(tiktok["desc"])
print(len(userPosts))
MicroBit.tsx
import React from 'react';
class MicroBit extends React.PureComponent {
render() {
return (
<div className="main-wrapper">
<h2 style={{padding: '5%'}}> microBit here ! </h2>
</div>
);
}
}
export default MicroBit;
getMeetings.js
import axios from "axios";
import {
GET_MEETING_CALLED,
GET_MEETING_RETURNED,
GET_MEETING_ERROR
} from "./types";
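// Thunk action creator: requests the stored meetings with the saved JWT and
// dispatches the request lifecycle actions defined in ./types.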
export const getMeetings = () => {
const header = { Authorization: localStorage.getItem("jwt") };
const local = "http://localhost:8080";
const server = process.env.REACT_APP_TOML_PRODUCTION_URL || local;
const promise = axios.get(`${server}/api/meeting/retrieve`, {
headers: header
});
return function(dispatch) {
dispatch({
type: GET_MEETING_CALLED
});
promise
.then(res => {
dispatch({
type: GET_MEETING_RETURNED,
payload: res.data
});
})
.catch(err => {
console.log(err);
dispatch({ type: GET_MEETING_ERROR });
});
};
}; | |
service.py
from twisted.application import internet, service
from twisted.web import server, resource, client
from twisted.internet import defer, reactor, threads, utils, task
from zope import interface
import yaml
import time
import cgi
import random
from distributex.backends import in_memory_backend, memcached_backend
class SiteRoot(resource.Resource):
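"""Twisted web resource exposing the lock API (/get, /wait, /release),
backed by a pluggable in-memory or memcached lock backend."""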
isLeaf = True
addSlash = True
def __init__(self, config):
self.backends = {
'memcache': memcached_backend.MemcachedBackend,
'inmemory': in_memory_backend.InMemoryDictBackend
}
self.config = yaml.load(open(config))
self.ready = False
reactor.callWhenRunning(self.setup)
@defer.inlineCallbacks
def setup(self):
# Initialise the configured backend
self.backend = self.backends[
self.config.get('backend', 'inmemory')
](self.config)
self.pools = {}
# Construct our pools
for pool in self.config.get('pools', []):
if 'servers' in pool:
servers = pool['servers'].replace(' ', '').split(',')
else:
servers = []
self.pools[pool['name']] = servers
expire = pool.get('expire', 1800)
maxlocks = pool.get('maxlocks', 1)
yield defer.maybeDeferred(
self.backend.add_pool, pool['name'], expire, maxlocks=maxlocks
)
self.ready = True
defer.returnValue(None)
def request_finish(self, request, result):
request.write(result)
request.finish()
def stop_timer(self, timer):
if timer.running:
timer.stop()
def wait_finish(self, lock, request, timer):
self.stop_timer(timer)
self.request_finish(request, 'YES')
def wait_bailout(self, error, request, timer):
self.stop_timer(timer)
self.request_finish(request, 'NO')
@defer.inlineCallbacks
def wait_lock(self, d, pool, host):
lock = yield defer.maybeDeferred(
self.backend.get_lock, pool, host
)
if lock:
d.callback(True)
def request_wait(self, request, pool, host):
d = defer.Deferred()
timer = task.LoopingCall(self.wait_lock, d, pool, host)
d.addCallback(self.wait_finish, request, timer)
d.addErrback(self.wait_bailout, request, timer)
request.notifyFinish().addErrback(
lambda _: self.stop_timer(timer)
)
timer.start(1 + random.random(), True)
return d
def request_release(self, request, pool, host):
d = defer.maybeDeferred(
self.backend.release_lock, pool, host
).addCallback(lambda _: self.request_finish(request, 'OK'))
def request_getlock(self, request, pool, host):
d = defer.maybeDeferred(
self.backend.get_lock, pool, host
).addCallback(
lambda l: self.request_finish(request, l and 'YES' or 'NO')
)
def handle_request(self, request):
if not self.ready:
reactor.callLater(0, self.handle_request, request)
else:
call = request.path.replace('/', '')
if not (('host' in request.args) and ('pool' in request.args)):
self.request_finish(request, 'INVALID')
return
host = cgi.escape(request.args["host"][0])
pool = cgi.escape(request.args["pool"][0])
if pool in self.pools:
if self.pools[pool]:
# Server not allowed
if not(host in self.pools[pool]):
self.request_finish(request, 'INVALID')
return
else:
self.request_finish(request, 'INVALID')
return
if call == 'wait':
# Wait for a lock
reactor.callLater(random.random()/5, self.request_wait,
request, pool, host)
elif call == 'release':
# Release a lock
self.request_release(request, pool, host)
elif call == 'get':
# Get a lock, don't wait for it
self.request_getlock(request, pool, host)
else:
self.request_finish(request, 'INVALID')
def render_GET(self, request):
self.handle_request(request)
return server.NOT_DONE_YET
catalogs_with_extra.rs
use crate::constants::CATALOG_PREVIEW_SIZE;
use crate::models::common::{
eq_update, resources_update_with_vector_content, ResourceLoadable, ResourcesAction,
};
use crate::models::ctx::Ctx;
use crate::runtime::msg::{Action, ActionLoad, Internal, Msg};
use crate::runtime::{Effects, Env, UpdateWithCtx};
use crate::types::addon::{AggrRequest, ExtraValue};
use crate::types::resource::MetaItemPreview;
use serde::{Deserialize, Serialize};
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct Selected {
pub extra: Vec<ExtraValue>,
}
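/// Model that tracks one catalog resource per installed addon for the selected
/// extra properties, trimming each catalog to CATALOG_PREVIEW_SIZE items.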
#[derive(Default, Serialize)]
pub struct CatalogsWithExtra {
pub selected: Option<Selected>,
pub catalogs: Vec<ResourceLoadable<Vec<MetaItemPreview>>>,
}
impl<E: Env + 'static> UpdateWithCtx<E> for CatalogsWithExtra {
fn update(&mut self, msg: &Msg, ctx: &Ctx) -> Effects {
match msg {
Msg::Action(Action::Load(ActionLoad::CatalogsWithExtra(selected))) => {
let selected_effects = eq_update(&mut self.selected, Some(selected.to_owned()));
let catalogs_effects = resources_update_with_vector_content::<E, _>(
&mut self.catalogs,
ResourcesAction::ResourcesRequested {
request: &AggrRequest::AllCatalogs {
extra: &selected.extra,
},
addons: &ctx.profile.addons,
},
);
selected_effects.join(catalogs_effects)
}
Msg::Action(Action::Unload) => {
let selected_effects = eq_update(&mut self.selected, None);
let catalogs_effects = eq_update(&mut self.catalogs, vec![]);
selected_effects.join(catalogs_effects)
}
Msg::Internal(Internal::ResourceRequestResult(request, result)) => {
resources_update_with_vector_content::<E, _>(
&mut self.catalogs,
ResourcesAction::ResourceRequestResult {
request,
result,
limit: &Some(CATALOG_PREVIEW_SIZE),
},
)
}
Msg::Internal(Internal::ProfileChanged) => match &self.selected {
Some(selected) => resources_update_with_vector_content::<E, _>(
&mut self.catalogs,
ResourcesAction::ResourcesRequested {
request: &AggrRequest::AllCatalogs {
extra: &selected.extra,
},
addons: &ctx.profile.addons,
},
),
_ => Effects::none().unchanged(),
},
_ => Effects::none().unchanged(),
}
}
}
user.py
#!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sqlalchemy as sql | class User(base_class.Base):
id = sql.Column('id', sql.Integer, nullable=False, unique=True, index=True, primary_key=True)
username = sql.Column(sql.NCHAR(32), nullable=False, index=True, unique=True)
# hashed_password = sql.Column(sql.BINARY(64), nullable=False)
hashed_password = sql.Column(sql.String(128), nullable=False) |
import app.db.base_class as base_class
|
styles.js | !function(n){function | (e){if(t[e])return t[e].exports;var o=t[e]={i:e,l:!1,exports:{}};return n[e].call(o.exports,o,o.exports,r),o.l=!0,o.exports}var t={};r.m=n,r.c=t,r.i=function(n){return n},r.d=function(n,t,e){r.o(n,t)||Object.defineProperty(n,t,{configurable:!1,enumerable:!0,get:e})},r.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return r.d(t,"a",t),t},r.o=function(n,r){return Object.prototype.hasOwnProperty.call(n,r)},r.p="",r(r.s=7)}({7:function(n,r){}}); | r |
search_numeric_range_test.go | // Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package searcher
import (
"reflect"
"testing"
"github.com/timezstyle/bleve/v2/numeric"
)
func TestSplitRange(t *testing.T) {
min := numeric.Float64ToInt64(1.0)
max := numeric.Float64ToInt64(5.0)
ranges := splitInt64Range(min, max, 4)
enumerated := ranges.Enumerate(nil)
if len(enumerated) != 135 {
t.Errorf("expected 135 terms, got %d", len(enumerated))
}
}
func TestIncrementBytes(t *testing.T) {
tests := []struct {
in []byte
out []byte
}{
{
in: []byte{0},
out: []byte{1},
},
{
in: []byte{0, 0},
out: []byte{0, 1},
}, | in: []byte{0, 255},
out: []byte{1, 0},
},
}
for _, test := range tests {
actual := incrementBytes(test.in)
if !reflect.DeepEqual(actual, test.out) {
t.Errorf("expected %#v, got %#v", test.out, actual)
}
}
} | { |
http.go | package influxdb
import (
"context"
"crypto/tls"
"encoding/json"
"fmt"
"github.com/geekflow/straw/internal"
"github.com/geekflow/straw/plugins/serializers/influx"
log "github.com/sirupsen/logrus"
"io"
"io/ioutil"
"net"
"net/http"
"net/url"
"path"
"strings"
"time"
)
const (
defaultRequestTimeout = time.Second * 5
defaultDatabase = "straw"
errStringDatabaseNotFound = "database not found"
errStringHintedHandoffNotEmpty = "hinted handoff queue not empty"
errStringPartialWrite = "partial write"
errStringPointsBeyondRP = "points beyond retention policy"
errStringUnableToParse = "unable to parse"
)
var (
// Escape an identifier in InfluxQL.
escapeIdentifier = strings.NewReplacer(
"\n", `\n`,
`\`, `\\`,
`"`, `\"`,
)
)
// APIError is a general error reported by the InfluxDB server
type APIError struct {
StatusCode int
Title string
Description string
}
func (e APIError) Error() string {
if e.Description != "" {
return fmt.Sprintf("%s: %s", e.Title, e.Description)
}
return e.Title
}
type DatabaseNotFoundError struct {
APIError
Database string
}
// QueryResponse is the response body from the /query endpoint
type QueryResponse struct {
Results []QueryResult `json:"results"`
}
type QueryResult struct {
Err string `json:"error,omitempty"`
}
func (r QueryResponse) Error() string {
if len(r.Results) > 0 {
return r.Results[0].Err
}
return ""
}
// WriteResponse is the response body from the /write endpoint
type WriteResponse struct {
Err string `json:"error,omitempty"`
}
func (r WriteResponse) Error() string {
return r.Err
}
type HTTPConfig struct {
URL *url.URL
UserAgent string
Timeout time.Duration
Username string
Password string
TLSConfig *tls.Config
Proxy *url.URL
Headers map[string]string
ContentEncoding string
Database string
DatabaseTag string
ExcludeDatabaseTag bool
RetentionPolicy string
Consistency string
SkipDatabaseCreation bool
InfluxUintSupport bool `toml:"influx_uint_support"`
Serializer *influx.Serializer
//Log internal.Logger
}
type httpClient struct {
client *http.Client
config HTTPConfig
createdDatabases map[string]bool
//log internal.Logger
}
func NewHTTPClient(config HTTPConfig) (*httpClient, error) {
if config.URL == nil {
return nil, ErrMissingURL
}
if config.Database == "" {
config.Database = defaultDatabase
}
if config.Timeout == 0 {
config.Timeout = defaultRequestTimeout
}
userAgent := config.UserAgent
if userAgent == "" {
userAgent = "Straw/" + internal.Version()
}
if config.Headers == nil {
config.Headers = make(map[string]string)
}
config.Headers["User-Agent"] = userAgent
var proxy func(*http.Request) (*url.URL, error)
if config.Proxy != nil {
proxy = http.ProxyURL(config.Proxy)
} else {
proxy = http.ProxyFromEnvironment
}
if config.Serializer == nil {
config.Serializer = influx.NewSerializer()
}
var transport *http.Transport
switch config.URL.Scheme {
case "http", "https":
transport = &http.Transport{
Proxy: proxy,
TLSClientConfig: config.TLSConfig,
}
case "unix":
transport = &http.Transport{
Dial: func(_, _ string) (net.Conn, error) {
return net.DialTimeout(
config.URL.Scheme,
config.URL.Path,
defaultRequestTimeout,
)
},
}
default:
return nil, fmt.Errorf("unsupported scheme %q", config.URL.Scheme)
}
client := &httpClient{
client: &http.Client{
Timeout: config.Timeout,
Transport: transport,
},
createdDatabases: make(map[string]bool),
config: config,
//log: config.Log,
}
return client, nil
}
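// Usage sketch (illustrative, not from the original source; the URL and
// database name are placeholder assumptions):
//
//	u, _ := url.Parse("http://127.0.0.1:8086")
//	client, err := NewHTTPClient(HTTPConfig{URL: u, Database: "straw"})
//	if err != nil {
//		// handle error
//	}
//	err = client.CreateDatabase(context.Background(), client.Database())
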
// URL returns the origin URL that this client connects to.
func (c *httpClient) URL() string {
return c.config.URL.String()
}
// Database returns the default database that this client connects to.
func (c *httpClient) Database() string {
return c.config.Database
}
// CreateDatabase attempts to create a new database in the InfluxDB server.
// Note that some names are not allowed by the server, notably those with
// non-printable characters or slashes.
func (c *httpClient) CreateDatabase(ctx context.Context, database string) error {
query := fmt.Sprintf(`CREATE DATABASE "%s"`,
escapeIdentifier.Replace(database))
	req, err := c.makeQueryRequest(query)
	if err != nil {
		return err
	}
	resp, err := c.client.Do(req.WithContext(ctx))
if err != nil {
return err
}
defer resp.Body.Close()
queryResp := &QueryResponse{}
dec := json.NewDecoder(resp.Body)
err = dec.Decode(queryResp)
if err != nil {
if resp.StatusCode == 200 {
c.createdDatabases[database] = true
return nil
}
return &APIError{
StatusCode: resp.StatusCode,
Title: resp.Status,
}
}
// Even with a 200 response there can be an error
if resp.StatusCode == http.StatusOK && queryResp.Error() == "" {
c.createdDatabases[database] = true
return nil
}
return &APIError{
StatusCode: resp.StatusCode,
Title: resp.Status,
Description: queryResp.Error(),
}
}
// Write sends the metrics to InfluxDB
func (c *httpClient) Write(ctx context.Context, metrics []internal.Metric) error {
batches := make(map[string][]internal.Metric)
if c.config.DatabaseTag == "" {
err := c.writeBatch(ctx, c.config.Database, metrics)
if err != nil {
return err
}
} else {
for _, metric := range metrics {
db, ok := metric.GetTag(c.config.DatabaseTag)
if !ok {
db = c.config.Database
}
if _, ok := batches[db]; !ok {
batches[db] = make([]internal.Metric, 0)
}
if c.config.ExcludeDatabaseTag {
// Avoid modifying the metric in case we need to retry the request.
metric = metric.Copy()
metric.Accept()
metric.RemoveTag(c.config.DatabaseTag)
}
batches[db] = append(batches[db], metric)
}
for db, batch := range batches {
if !c.config.SkipDatabaseCreation && !c.createdDatabases[db] {
err := c.CreateDatabase(ctx, db)
if err != nil {
//c.log.Warnf("When writing to [%s]: database %q creation failed: %v",
// c.config.URL, db, err)
log.Warnf("When writing to [%s]: database %q creation failed: %v",
c.config.URL, db, err)
}
}
err := c.writeBatch(ctx, db, batch)
if err != nil {
return err
}
}
}
return nil
}
func (c *httpClient) writeBatch(ctx context.Context, db string, metrics []internal.Metric) error {
url, err := makeWriteURL(c.config.URL, db, c.config.RetentionPolicy, c.config.Consistency)
if err != nil {
return err
}
reader, err := c.requestBodyReader(metrics)
if err != nil {
return err
}
defer reader.Close()
req, err := c.makeWriteRequest(url, reader)
if err != nil {
return err
}
resp, err := c.client.Do(req.WithContext(ctx))
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusNoContent {
return nil
}
writeResp := &WriteResponse{}
dec := json.NewDecoder(resp.Body)
var desc string
err = dec.Decode(writeResp)
if err == nil {
desc = writeResp.Err
}
if strings.Contains(desc, errStringDatabaseNotFound) {
return &DatabaseNotFoundError{
APIError: APIError{
StatusCode: resp.StatusCode,
Title: resp.Status,
Description: desc,
},
Database: db,
}
}
// This "error" is an informational message about the state of the
// InfluxDB cluster.
if strings.Contains(desc, errStringHintedHandoffNotEmpty) {
return nil
}
// Points beyond retention policy is returned when points are immediately
// discarded for being older than the retention policy. Usually this is not
// a cause for concern and we don't want to retry.
if strings.Contains(desc, errStringPointsBeyondRP) {
//c.log.Warnf("When writing to [%s]: received error %v",
// c.URL(), desc)
log.Warnf("When writing to [%s]: received error %v",
c.URL(), desc)
return nil
}
// Other partial write errors, such as "field type conflict", are not
// correctable at this point and so the point is dropped instead of
// retrying.
if strings.Contains(desc, errStringPartialWrite) {
// c.log.Errorf("When writing to [%s]: received error %v; discarding points",
// c.URL(), desc)
log.Errorf("When writing to [%s]: received error %v; discarding points",
c.URL(), desc)
return nil
}
	// This error indicates a bug in the line protocol serialization;
	// retries would not be successful.
if strings.Contains(desc, errStringUnableToParse) {
//c.log.Errorf("When writing to [%s]: received error %v; discarding points",
// c.URL(), desc)
log.Errorf("When writing to [%s]: received error %v; discarding points",
c.URL(), desc)
return nil
}
return &APIError{
StatusCode: resp.StatusCode,
Title: resp.Status,
Description: desc,
}
}
func (c *httpClient) makeQueryRequest(query string) (*http.Request, error) {
queryURL, err := makeQueryURL(c.config.URL)
if err != nil {
return nil, err
}
params := url.Values{}
params.Set("q", query)
form := strings.NewReader(params.Encode())
req, err := http.NewRequest("POST", queryURL, form)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
c.addHeaders(req)
return req, nil
}
func (c *httpClient) makeWriteRequest(url string, body io.Reader) (*http.Request, error) {
var err error
req, err := http.NewRequest("POST", url, body)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "text/plain; charset=utf-8")
c.addHeaders(req)
if c.config.ContentEncoding == "gzip" {
req.Header.Set("Content-Encoding", "gzip")
}
return req, nil
}
// requestBodyReader wraps the io.Reader from influx.NewReader in an
// io.ReadCloser, which is useful for quickly closing the write side of the
// connection in case of error.
func (c *httpClient) requestBodyReader(metrics []internal.Metric) (io.ReadCloser, error) {
reader := influx.NewReader(metrics, c.config.Serializer)
if c.config.ContentEncoding == "gzip" {
rc, err := internal.CompressWithGzip(reader)
if err != nil {
return nil, err
}
return ioutil.NopCloser(rc), nil
}
return ioutil.NopCloser(reader), nil
}
func (c *httpClient) addHeaders(req *http.Request) {
if c.config.Username != "" || c.config.Password != "" {
req.SetBasicAuth(c.config.Username, c.config.Password)
}
for header, value := range c.config.Headers {
req.Header.Set(header, value)
}
}
func makeWriteURL(loc *url.URL, db, rp, consistency string) (string, error) {
params := url.Values{}
params.Set("db", db)
if rp != "" {
params.Set("rp", rp)
}
if consistency != "one" && consistency != "" {
params.Set("consistency", consistency)
}
u := *loc
switch u.Scheme {
case "unix":
u.Scheme = "http"
u.Host = "127.0.0.1"
u.Path = "/write"
case "http", "https":
u.Path = path.Join(u.Path, "write")
default:
return "", fmt.Errorf("unsupported scheme: %q", loc.Scheme)
}
u.RawQuery = params.Encode()
return u.String(), nil
}
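// For example (assumption, not from the original source): with
// loc = "http://localhost:8086", db = "db0", an empty retention policy and
// consistency "one", makeWriteURL yields "http://localhost:8086/write?db=db0".
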
func makeQueryURL(loc *url.URL) (string, error) |
func (c *httpClient) Close() {
internal.CloseIdleConnections(c.client)
}
| {
u := *loc
switch u.Scheme {
case "unix":
u.Scheme = "http"
u.Host = "127.0.0.1"
u.Path = "/query"
case "http", "https":
u.Path = path.Join(u.Path, "query")
default:
return "", fmt.Errorf("unsupported scheme: %q", loc.Scheme)
}
return u.String(), nil
} |
test.rs | use assert_cmd;
use std::env;
fn setup_command() -> assert_cmd::cmd::Command {
assert_cmd::Command::cargo_bin("file-has").unwrap()
}
#[test]
fn fails_with_nonexistent_file() {
setup_command()
.arg("Great googly moogly!")
.assert()
.failure();
}
// Test most basic functionality
#[test]
fn basic_test() {
let test_vectors = [
("a", "a", true),
("a", "b", false),
("a", "c", false),
("a", "ab", false),
("a", "ac", false),
("b", "a", false),
("b", "b", true),
("ab", "a", true),
("ab", "b", true),
("ab", "c", false),
("ab", "ab", true),
("ab", "ac", false),
("bc", "ac", false),
("bc", "bc", true),
("ac", "bc", false),
("ac", "c", true),
("ac", "ac", true),
("ac", "b", false),
];
env::set_current_dir("samples").unwrap();
for tv in &test_vectors {
let assertion = setup_command().arg(tv.0).arg(tv.1).assert();
if tv.2 {
assertion.success();
} else {
assertion.failure();
}
}
env::set_current_dir("..").unwrap(); | } | |
stdcopy.go | package utils
import (
"encoding/binary"
"errors"
"io"
| const (
StdWriterPrefixLen = 8
StdWriterFdIndex = 0
StdWriterSizeIndex = 4
)
type StdType [StdWriterPrefixLen]byte
var (
Stdin StdType = StdType{0: 0}
Stdout StdType = StdType{0: 1}
Stderr StdType = StdType{0: 2}
)
type StdWriter struct {
io.Writer
prefix StdType
sizeBuf []byte
}
func (w *StdWriter) Write(buf []byte) (n int, err error) {
if w == nil || w.Writer == nil {
return 0, errors.New("Writer not instanciated")
}
binary.BigEndian.PutUint32(w.prefix[4:], uint32(len(buf)))
buf = append(w.prefix[:], buf...)
n, err = w.Writer.Write(buf)
return n - StdWriterPrefixLen, err
}
// NewStdWriter instantiates a new Writer.
// Everything written to it will be encapsulated using a custom format,
// and written to the underlying `w` stream.
// This allows multiple write streams (e.g. stdout and stderr) to be muxed into a single connection.
// `t` indicates the id of the stream to encapsulate.
// It can be utils.Stdin, utils.Stdout, utils.Stderr.
func NewStdWriter(w io.Writer, t StdType) *StdWriter {
if len(t) != StdWriterPrefixLen {
return nil
}
return &StdWriter{
Writer: w,
prefix: t,
sizeBuf: make([]byte, 4),
}
}
var ErrInvalidStdHeader = errors.New("Unrecognized input header")
// StdCopy is a modified version of io.Copy.
//
// StdCopy will demultiplex `src`, assuming that it contains two streams,
// previously multiplexed together using a StdWriter instance.
// As it reads from `src`, StdCopy will write to `dstout` and `dsterr`.
//
// StdCopy will read until it hits EOF on `src`. It will then return a nil error.
// In other words: if `err` is non-nil, it indicates a real underlying error.
//
// `written` will hold the total number of bytes written to `dstout` and `dsterr`.
func StdCopy(dstout, dsterr io.Writer, src io.Reader) (written int64, err error) {
var (
buf = make([]byte, 32*1024+StdWriterPrefixLen+1)
bufLen = len(buf)
nr, nw int
er, ew error
out io.Writer
frameSize int
)
for {
// Make sure we have at least a full header
for nr < StdWriterPrefixLen {
var nr2 int
nr2, er = src.Read(buf[nr:])
nr += nr2
if er == io.EOF {
if nr < StdWriterPrefixLen {
log.Debugf("Corrupted prefix: %v", buf[:nr])
return written, nil
}
break
}
if er != nil {
log.Debugf("Error reading header: %s", er)
return 0, er
}
}
// Check the first byte to know where to write
switch buf[StdWriterFdIndex] {
case 0:
fallthrough
case 1:
// Write on stdout
out = dstout
case 2:
// Write on stderr
out = dsterr
default:
log.Debugf("Error selecting output fd: (%d)", buf[StdWriterFdIndex])
return 0, ErrInvalidStdHeader
}
// Retrieve the size of the frame
frameSize = int(binary.BigEndian.Uint32(buf[StdWriterSizeIndex : StdWriterSizeIndex+4]))
log.Debugf("framesize: %d", frameSize)
// Check if the buffer is big enough to read the frame.
// Extend it if necessary.
if frameSize+StdWriterPrefixLen > bufLen {
log.Debugf("Extending buffer cap by %d (was %d)", frameSize+StdWriterPrefixLen-bufLen+1, len(buf))
buf = append(buf, make([]byte, frameSize+StdWriterPrefixLen-bufLen+1)...)
bufLen = len(buf)
}
// While the amount of bytes read is less than the size of the frame + header, we keep reading
for nr < frameSize+StdWriterPrefixLen {
var nr2 int
nr2, er = src.Read(buf[nr:])
nr += nr2
if er == io.EOF {
if nr < frameSize+StdWriterPrefixLen {
log.Debugf("Corrupted frame: %v", buf[StdWriterPrefixLen:nr])
return written, nil
}
break
}
if er != nil {
log.Debugf("Error reading frame: %s", er)
return 0, er
}
}
// Write the retrieved frame (without header)
nw, ew = out.Write(buf[StdWriterPrefixLen : frameSize+StdWriterPrefixLen])
if ew != nil {
log.Debugf("Error writing frame: %s", ew)
return 0, ew
}
// If the frame has not been fully written: error
if nw != frameSize {
log.Debugf("Error Short Write: (%d on %d)", nw, frameSize)
return 0, io.ErrShortWrite
}
written += int64(nw)
// Move the rest of the buffer to the beginning
copy(buf, buf[frameSize+StdWriterPrefixLen:])
// Move the index
nr -= frameSize + StdWriterPrefixLen
}
} | "example.com/m/v2/pkg/log"
)
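// Usage sketch (illustrative, not from the original source): multiplex two
// streams into one buffer with StdWriter, then demultiplex them with StdCopy.
//
//	var muxed bytes.Buffer
//	NewStdWriter(&muxed, Stdout).Write([]byte("to stdout"))
//	NewStdWriter(&muxed, Stderr).Write([]byte("to stderr"))
//	var outBuf, errBuf bytes.Buffer
//	written, err := StdCopy(&outBuf, &errBuf, &muxed)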
|
booking.model.js | const mongoose = require('mongoose');
const autopopulate = require('mongoose-autopopulate'); |
const Schema = mongoose.Schema;
// schema
const bookingSchema = new Schema({
trainId: {
type: mongoose.Schema.Types.ObjectId,
ref: 'trains',
},
passengerIds: [{
passenger: {
type: mongoose.Schema.Types.ObjectId,
ref: 'passengers',
autopopulate: {
select: ['firstName', 'lastName', 'gender']
}
},
}],
status: {
type: Boolean,
default: true
}
}, {
timestamps: true
});
// creating a new index
bookingSchema.index({
'trainId': 1
});
// enable the autopopulate plugin
bookingSchema.plugin(autopopulate);
// exporting the entire module
module.exports = mongoose.model('bookings', bookingSchema); | |
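// Usage sketch (illustrative; the ids are placeholders):
// const Booking = require('./booking.model');
// const booking = await Booking.create({
//   trainId: someTrainId,
//   passengerIds: [{ passenger: somePassengerId }],
// });
// Reading it back autopopulates firstName/lastName/gender on each passenger.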
error.rs | use std::{error, fmt};
use std::string::FromUtf8Error;
use std::str::Utf8Error;
#[derive(Debug)]
pub enum Error {
StreamExpected(usize),
LimitReached(usize),
DecodeStringFailed(usize, FromUtf8Error),
DecodeStrFailed(usize, Utf8Error),
}
impl fmt::Display for Error { | match *self {
            Error::StreamExpected(index) => write!(f, "expected stream at index {}", index),
            Error::LimitReached(index) => write!(f, "limit reached at index {}", index),
            Error::DecodeStringFailed(index, ref e) => write!(f, "cannot decode string at index {}: {}", index, e),
            Error::DecodeStrFailed(index, ref e) => write!(f, "cannot decode str at index {}: {}", index, e),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
_ => "unknown operand value for the given kind",
}
}
} | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
fillpdf.go | /*
* FillPDF - Fill PDF forms
* Copyright 2022 Karel Bilek
* Copyright DesertBit
* Author: Roland Singer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fillpdf
import (
"bytes"
"fmt"
"html"
"io"
"io/ioutil"
"net/http"
"os"
"os/exec"
"path"
"github.com/ledongthuc/pdf"
)
type Config struct {
Java string
McPDF string
}
type Executor struct {
java, mcpdf string
}
func NewExecutor(config Config) (*Executor, error) {
java, mcpdf := config.Java, config.McPDF
if _, err := exec.LookPath(java); err != nil {
return nil, fmt.Errorf("pdftk utility is not installed at %q: %w", java, err)
}
// sniff the start of mcpdf to tell if it's the correct one...
f, err := os.Open(mcpdf)
if err != nil {
return nil, fmt.Errorf("mcpdf file not found at %s: %w", mcpdf, err)
}
defer f.Close()
buffer := make([]byte, 512)
if _, err := f.Read(buffer); err != nil {
return nil, fmt.Errorf("mcpdf file cannot be read from %s: %w", mcpdf, err)
}
contentType := http.DetectContentType(buffer)
if contentType != "application/zip" {
return nil, fmt.Errorf("mcpdf file does not seem to be %q, is %q", "application/zip", contentType)
}
return &Executor{
java: java,
mcpdf: mcpdf,
}, nil
}
type FillPDF struct {
dir string
fieldsNames []string
fields map[string]FormField
e *Executor
}
type FormField struct {
Name string
Type string
CurrentValue string
}
func (e *Executor) CreateFromFile(path string) (res *FillPDF, retCleanup func(), retErr error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, fmt.Errorf("cannot open file %q: %w", path, err)
}
return e.Create(file)
}
func (e *Executor) CreateFromBytes(bs []byte) (res *FillPDF, retCleanup func(), retErr error) {
return e.Create(bytes.NewReader(bs))
}
func (e *Executor) Create(input io.Reader) (res *FillPDF, retCleanup func(), retErr error) {
cleanup := func() {}
newDir, err := ioutil.TempDir("", "fillpdf-create")
if err != nil {
return nil, nil, fmt.Errorf("cannot create temporary directory for fillpdf: %w", err)
}
cleanup = func() {
os.RemoveAll(newDir)
}
defer func() {
if retErr != nil {
cleanup()
}
}()
newFile := path.Join(newDir, "input.pdf")
destFile, err := os.Create(newFile)
if err != nil {
return nil, nil, fmt.Errorf("cannot create file %s: %w", newFile, err)
}
defer destFile.Close()
_, err = io.Copy(destFile, input)
if err != nil {
return nil, nil, fmt.Errorf("cannot copy to file %s: %w", newFile, err)
}
_, r, err := pdf.Open(newFile)
if err != nil {
return nil, nil, fmt.Errorf("cannot parse PDF %s: %w", newFile, err)
}
pfields := r.Trailer().Key("Root").Key("AcroForm").Key("Fields")
fields := make([]FormField, 0, pfields.Len())
for i := 0; i < pfields.Len(); i++ {
pfield := pfields.Index(i)
t := pfield.Key("T")
name := t.RawString()
tp := pfield.Key("FT")
kind := ""
if tp.String() == "/Tx" {
kind = "Text"
}
if tp.String() == "/Btn" {
kind = "Button"
}
v := pfield.Key("V")
if kind != "" {
fields = append(fields, FormField{
Name: name,
Type: kind,
CurrentValue: v.RawString(),
})
}
}
names := make([]string, 0, len(fields))
m := make(map[string]FormField, len(fields))
for _, f := range fields {
names = append(names, f.Name)
m[f.Name] = f
}
return &FillPDF{
dir: newDir,
fieldsNames: names,
fields: m,
e: e,
}, cleanup, nil
}
func (f *FillPDF) Fields() []FormField {
r := make([]FormField, 0, len(f.fieldsNames))
for _, fi := range f.fieldsNames {
r = append(r, f.fields[fi])
}
return r
}
func (f *FillPDF) DefaultTextValues() map[string]string {
r := make(map[string]string, len(f.fieldsNames))
for k, v := range f.fields {
if v.Type == "Text" {
r[k] = k
}
}
return r
}
func (f *FillPDF) AllButtonsTrue() map[string]bool {
r := make(map[string]bool, len(f.fieldsNames))
for k, v := range f.fields {
if v.Type == "Button" {
r[k] = true
}
}
return r
}
type FormData struct {
TextValues map[string]string
ButtonValues map[string]bool
}
func (f *FillPDF) FillToFile(out string, data FormData, editable bool) error {
destFile, err := os.Create(out)
if err != nil {
return fmt.Errorf("cannot create file %s: %w", out, err)
}
defer destFile.Close()
return f.Fill(destFile, data, editable)
}
func (f *FillPDF) FillToBytes(data FormData, editable bool) ([]byte, error) {
rs := &bytes.Buffer{}
if err := f.Fill(rs, data, editable); err != nil {
return nil, err
}
return rs.Bytes(), nil
}
func (f *FillPDF) Fill(out io.Writer, data FormData, editable bool) error {
for k := range data.TextValues {
fi, ok := f.fields[k]
if !ok {
return fmt.Errorf("field %q is not in the form", k)
}
if fi.Type != "Text" {
return fmt.Errorf("field %q is not Text, is %q", k, fi.Type)
}
}
for k := range data.ButtonValues {
fi, ok := f.fields[k]
if !ok {
return fmt.Errorf("field %q is not in the form", k)
}
if fi.Type != "Button" {
return fmt.Errorf("field %q is not Button, is %q", k, fi.Type)
}
}
inbs, err := createXfdfFile(data.TextValues, data.ButtonValues)
if err != nil {
return fmt.Errorf("cannot create FDF file: %w", err)
}
args := []string{"-jar", f.e.mcpdf, "input.pdf", "flatten", "fill_form", "-", "output", "-"}
if editable {
args = []string{"-jar", f.e.mcpdf, "input.pdf", "fill_form", "-", "output", "-"}
}
bs, err := runCommandInPathWithStdin(inbs, f.dir, f.e.java, args...)
if err != nil {
return fmt.Errorf("mcpdf error when trying to fill form: %w", err)
}
if _, err := io.Copy(out, bytes.NewReader(bs)); err != nil {
return fmt.Errorf("cannot copy file to result: %w", err)
}
return nil
}
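// Usage sketch (illustrative, not from the original source; paths and field
// names are placeholder assumptions):
//
//	e, err := NewExecutor(Config{Java: "java", McPDF: "mcpdf.jar"})
//	if err != nil {
//		// handle error
//	}
//	form, cleanup, err := e.CreateFromFile("form.pdf")
//	if err != nil {
//		// handle error
//	}
//	defer cleanup()
//	err = form.FillToFile("out.pdf", FormData{
//		TextValues:   map[string]string{"name": "Jane Doe"},
//		ButtonValues: map[string]bool{"agree": true},
//	}, false)
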
func createXfdfFile(textValues map[string]string, buttonValues map[string]bool) ([]byte, error) | {
const xfdfHeader = `<?xml version="1.0" encoding="UTF-8" standalone="no"?><xfdf><fields>`
const xfdfFooter = `</fields></xfdf>`
bsb := &bytes.Buffer{}
if _, err := fmt.Fprintln(bsb, xfdfHeader); err != nil {
return nil, fmt.Errorf("cannot print header: %w", err)
}
for key, value := range textValues {
valueStr := html.EscapeString(value)
if _, err := fmt.Fprintf(bsb, "<field name=\"%s\"><value>%s</value></field>", key, valueStr); err != nil {
return nil, fmt.Errorf("cannot print field: %w", err)
}
}
for key, value := range buttonValues {
fill := "Off"
if value {
fill = "Yes"
}
if _, err := fmt.Fprintf(bsb, "<field name=\"%s\"><value>%s</value></field>", key, fill); err != nil {
return nil, fmt.Errorf("cannot print field: %w", err)
}
}
if _, err := fmt.Fprintln(bsb, xfdfFooter); err != nil {
return nil, fmt.Errorf("cannot print footer: %w", err)
}
return bsb.Bytes(), nil
} |
|
mod.rs | pub type PrintWorkflowBackgroundSession = *mut ::core::ffi::c_void;
pub type PrintWorkflowBackgroundSetupRequestedEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowConfiguration = *mut ::core::ffi::c_void;
pub type PrintWorkflowForegroundSession = *mut ::core::ffi::c_void;
pub type PrintWorkflowForegroundSetupRequestedEventArgs = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowJobAbortReason(pub i32);
impl PrintWorkflowJobAbortReason {
pub const JobFailed: Self = Self(0i32);
pub const UserCanceled: Self = Self(1i32);
}
impl ::core::marker::Copy for PrintWorkflowJobAbortReason {}
impl ::core::clone::Clone for PrintWorkflowJobAbortReason {
fn clone(&self) -> Self {
*self
}
}
pub type PrintWorkflowJobActivatedEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowJobBackgroundSession = *mut ::core::ffi::c_void;
pub type PrintWorkflowJobNotificationEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowJobStartingEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowJobTriggerDetails = *mut ::core::ffi::c_void;
pub type PrintWorkflowJobUISession = *mut ::core::ffi::c_void;
pub type PrintWorkflowObjectModelSourceFileContent = *mut ::core::ffi::c_void;
pub type PrintWorkflowObjectModelTargetPackage = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowPdlConversionType(pub i32);
impl PrintWorkflowPdlConversionType {
pub const XpsToPdf: Self = Self(0i32); | pub const XpsToPwgr: Self = Self(1i32);
pub const XpsToPclm: Self = Self(2i32);
}
impl ::core::marker::Copy for PrintWorkflowPdlConversionType {}
impl ::core::clone::Clone for PrintWorkflowPdlConversionType {
fn clone(&self) -> Self {
*self
}
}
pub type PrintWorkflowPdlConverter = *mut ::core::ffi::c_void;
pub type PrintWorkflowPdlDataAvailableEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowPdlModificationRequestedEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowPdlSourceContent = *mut ::core::ffi::c_void;
pub type PrintWorkflowPdlTargetStream = *mut ::core::ffi::c_void;
pub type PrintWorkflowPrinterJob = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowPrinterJobStatus(pub i32);
impl PrintWorkflowPrinterJobStatus {
pub const Error: Self = Self(0i32);
pub const Aborted: Self = Self(1i32);
pub const InProgress: Self = Self(2i32);
pub const Completed: Self = Self(3i32);
}
impl ::core::marker::Copy for PrintWorkflowPrinterJobStatus {}
impl ::core::clone::Clone for PrintWorkflowPrinterJobStatus {
fn clone(&self) -> Self {
*self
}
}
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowSessionStatus(pub i32);
impl PrintWorkflowSessionStatus {
pub const Started: Self = Self(0i32);
pub const Completed: Self = Self(1i32);
pub const Aborted: Self = Self(2i32);
pub const Closed: Self = Self(3i32);
pub const PdlDataAvailableForModification: Self = Self(4i32);
}
impl ::core::marker::Copy for PrintWorkflowSessionStatus {}
impl ::core::clone::Clone for PrintWorkflowSessionStatus {
fn clone(&self) -> Self {
*self
}
}
pub type PrintWorkflowSourceContent = *mut ::core::ffi::c_void;
pub type PrintWorkflowSpoolStreamContent = *mut ::core::ffi::c_void;
pub type PrintWorkflowStreamTarget = *mut ::core::ffi::c_void;
pub type PrintWorkflowSubmittedEventArgs = *mut ::core::ffi::c_void;
pub type PrintWorkflowSubmittedOperation = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowSubmittedStatus(pub i32);
impl PrintWorkflowSubmittedStatus {
pub const Succeeded: Self = Self(0i32);
pub const Canceled: Self = Self(1i32);
pub const Failed: Self = Self(2i32);
}
impl ::core::marker::Copy for PrintWorkflowSubmittedStatus {}
impl ::core::clone::Clone for PrintWorkflowSubmittedStatus {
fn clone(&self) -> Self {
*self
}
}
pub type PrintWorkflowTarget = *mut ::core::ffi::c_void;
pub type PrintWorkflowTriggerDetails = *mut ::core::ffi::c_void;
pub type PrintWorkflowUIActivatedEventArgs = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Graphics_Printing_Workflow\"`*"]
#[repr(transparent)]
pub struct PrintWorkflowUICompletionStatus(pub i32);
impl PrintWorkflowUICompletionStatus {
pub const Completed: Self = Self(0i32);
pub const LaunchFailed: Self = Self(1i32);
pub const JobFailed: Self = Self(2i32);
pub const UserCanceled: Self = Self(3i32);
}
impl ::core::marker::Copy for PrintWorkflowUICompletionStatus {}
impl ::core::clone::Clone for PrintWorkflowUICompletionStatus {
fn clone(&self) -> Self {
*self
}
}
pub type PrintWorkflowUILauncher = *mut ::core::ffi::c_void;
pub type PrintWorkflowXpsDataAvailableEventArgs = *mut ::core::ffi::c_void; | |
validation_test.go | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package clientcmd
import (
"io/ioutil"
"os"
"strings"
"testing"
clientcmdapi "k8s.io/kubernetes/pkg/client/unversioned/clientcmd/api"
utilerrors "k8s.io/kubernetes/pkg/util/errors"
)
func TestConfirmUsableBadInfoButOkConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
config.Clusters["missing ca"] = &clientcmdapi.Cluster{
Server: "anything",
CertificateAuthority: "missing",
}
config.AuthInfos["error"] = &clientcmdapi.AuthInfo{
Username: "anything",
Token: "here",
}
config.Contexts["dirty"] = &clientcmdapi.Context{
Cluster: "missing ca",
AuthInfo: "error",
}
config.Clusters["clean"] = &clientcmdapi.Cluster{
Server: "anything",
}
config.AuthInfos["clean"] = &clientcmdapi.AuthInfo{
Token: "here",
}
config.Contexts["clean"] = &clientcmdapi.Context{
Cluster: "clean",
AuthInfo: "clean",
}
badValidation := configValidationTest{
config: config,
expectedErrorSubstring: []string{"unable to read certificate-authority"},
}
okTest := configValidationTest{
config: config,
}
okTest.testConfirmUsable("clean", t)
badValidation.testConfig(t)
}
func TestConfirmUsableBadInfoConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
config.Clusters["missing ca"] = &clientcmdapi.Cluster{
Server: "anything",
CertificateAuthority: "missing",
}
config.AuthInfos["error"] = &clientcmdapi.AuthInfo{
Username: "anything",
Token: "here",
}
config.Contexts["first"] = &clientcmdapi.Context{
Cluster: "missing ca",
AuthInfo: "error",
}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"unable to read certificate-authority"},
}
test.testConfirmUsable("first", t)
}
func TestConfirmUsableEmptyConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"invalid configuration: no configuration has been provided"},
}
test.testConfirmUsable("", t)
}
func TestConfirmUsableMissingConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"invalid configuration: no configuration has been provided"},
}
test.testConfirmUsable("not-here", t)
}
func TestValidateEmptyConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"invalid configuration: no configuration has been provided"},
}
test.testConfig(t)
}
func TestValidateMissingCurrentContextConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
config.CurrentContext = "anything"
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"context was not found for specified "},
}
test.testConfig(t)
}
func TestIsContextNotFound(t *testing.T) {
config := clientcmdapi.NewConfig()
config.CurrentContext = "anything"
err := Validate(*config)
if !IsContextNotFound(err) {
t.Errorf("Expected context not found, but got %v", err)
}
if !IsConfigurationInvalid(err) {
t.Errorf("Expected configuration invalid, but got %v", err)
}
}
func TestIsEmptyConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
err := Validate(*config)
if !IsEmptyConfig(err) {
t.Errorf("Expected context not found, but got %v", err)
}
if !IsConfigurationInvalid(err) {
t.Errorf("Expected configuration invalid, but got %v", err)
}
}
func TestIsConfigurationInvalid(t *testing.T) {
if newErrConfigurationInvalid([]error{}) != nil {
t.Errorf("unexpected error")
}
if newErrConfigurationInvalid([]error{ErrNoContext}) == ErrNoContext {
t.Errorf("unexpected error")
}
if newErrConfigurationInvalid([]error{ErrNoContext, ErrNoContext}) == nil {
t.Errorf("unexpected error")
}
if !IsConfigurationInvalid(newErrConfigurationInvalid([]error{ErrNoContext, ErrNoContext})) {
t.Errorf("unexpected error")
}
}
func TestValidateMissingReferencesConfig(t *testing.T) {
config := clientcmdapi.NewConfig()
config.CurrentContext = "anything"
config.Contexts["anything"] = &clientcmdapi.Context{Cluster: "missing", AuthInfo: "missing"}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"user \"missing\" was not found for context \"anything\"", "cluster \"missing\" was not found for context \"anything\""},
}
test.testContext("anything", t)
test.testConfig(t)
}
func TestValidateEmptyContext(t *testing.T) {
config := clientcmdapi.NewConfig()
config.CurrentContext = "anything"
config.Contexts["anything"] = &clientcmdapi.Context{}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"user was not specified for context \"anything\"", "cluster was not specified for context \"anything\""},
}
test.testContext("anything", t)
test.testConfig(t)
}
func TestValidateEmptyClusterInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.Clusters["empty"] = &clientcmdapi.Cluster{}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"cluster has no server defined"},
}
test.testCluster("empty", t)
test.testConfig(t)
}
func TestValidateMissingCAFileClusterInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.Clusters["missing ca"] = &clientcmdapi.Cluster{
Server: "anything",
CertificateAuthority: "missing",
}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"unable to read certificate-authority"},
}
test.testCluster("missing ca", t)
test.testConfig(t)
}
func TestValidateCleanClusterInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.Clusters["clean"] = &clientcmdapi.Cluster{
Server: "anything",
}
test := configValidationTest{
config: config,
}
test.testCluster("clean", t)
test.testConfig(t)
}
func TestValidateCleanWithCAClusterInfo(t *testing.T) {
tempFile, _ := ioutil.TempFile("", "")
defer os.Remove(tempFile.Name())
config := clientcmdapi.NewConfig()
config.Clusters["clean"] = &clientcmdapi.Cluster{
Server: "anything",
CertificateAuthority: tempFile.Name(),
}
test := configValidationTest{
config: config,
}
test.testCluster("clean", t)
test.testConfig(t)
}
func TestValidateEmptyAuthInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.AuthInfos["error"] = &clientcmdapi.AuthInfo{}
test := configValidationTest{
config: config,
}
test.testAuthInfo("error", t)
test.testConfig(t)
}
func | (t *testing.T) {
config := clientcmdapi.NewConfig()
config.AuthInfos["error"] = &clientcmdapi.AuthInfo{
ClientCertificate: "missing",
ClientKey: "missing",
}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"unable to read client-cert", "unable to read client-key"},
}
test.testAuthInfo("error", t)
test.testConfig(t)
}
func TestValidateCertDataOverridesFiles(t *testing.T) {
tempFile, _ := ioutil.TempFile("", "")
defer os.Remove(tempFile.Name())
config := clientcmdapi.NewConfig()
config.AuthInfos["clean"] = &clientcmdapi.AuthInfo{
ClientCertificate: tempFile.Name(),
ClientCertificateData: []byte("certdata"),
ClientKey: tempFile.Name(),
ClientKeyData: []byte("keydata"),
}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"client-cert-data and client-cert are both specified", "client-key-data and client-key are both specified"},
}
test.testAuthInfo("clean", t)
test.testConfig(t)
}
func TestValidateCleanCertFilesAuthInfo(t *testing.T) {
tempFile, _ := ioutil.TempFile("", "")
defer os.Remove(tempFile.Name())
config := clientcmdapi.NewConfig()
config.AuthInfos["clean"] = &clientcmdapi.AuthInfo{
ClientCertificate: tempFile.Name(),
ClientKey: tempFile.Name(),
}
test := configValidationTest{
config: config,
}
test.testAuthInfo("clean", t)
test.testConfig(t)
}
func TestValidateCleanTokenAuthInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.AuthInfos["clean"] = &clientcmdapi.AuthInfo{
Token: "any-value",
}
test := configValidationTest{
config: config,
}
test.testAuthInfo("clean", t)
test.testConfig(t)
}
func TestValidateMultipleMethodsAuthInfo(t *testing.T) {
config := clientcmdapi.NewConfig()
config.AuthInfos["error"] = &clientcmdapi.AuthInfo{
Token: "token",
Username: "username",
}
test := configValidationTest{
config: config,
expectedErrorSubstring: []string{"more than one authentication method", "token", "basicAuth"},
}
test.testAuthInfo("error", t)
test.testConfig(t)
}
type configValidationTest struct {
config *clientcmdapi.Config
expectedErrorSubstring []string
}
func (c configValidationTest) testContext(contextName string, t *testing.T) {
errs := validateContext(contextName, *c.config.Contexts[contextName], *c.config)
if len(c.expectedErrorSubstring) != 0 {
if len(errs) == 0 {
t.Errorf("Expected error containing: %v", c.expectedErrorSubstring)
}
for _, curr := range c.expectedErrorSubstring {
if len(errs) != 0 && !strings.Contains(utilerrors.NewAggregate(errs).Error(), curr) {
t.Errorf("Expected error containing: %v, but got %v", c.expectedErrorSubstring, utilerrors.NewAggregate(errs))
}
}
} else {
if len(errs) != 0 {
t.Errorf("Unexpected error: %v", utilerrors.NewAggregate(errs))
}
}
}
func (c configValidationTest) testConfirmUsable(contextName string, t *testing.T) {
err := ConfirmUsable(*c.config, contextName)
if len(c.expectedErrorSubstring) != 0 {
if err == nil {
t.Errorf("Expected error containing: %v", c.expectedErrorSubstring)
} else {
for _, curr := range c.expectedErrorSubstring {
if err != nil && !strings.Contains(err.Error(), curr) {
t.Errorf("Expected error containing: %v, but got %v", c.expectedErrorSubstring, err)
}
}
}
} else {
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
}
}
func (c configValidationTest) testConfig(t *testing.T) {
err := Validate(*c.config)
if len(c.expectedErrorSubstring) != 0 {
if err == nil {
t.Errorf("Expected error containing: %v", c.expectedErrorSubstring)
} else {
for _, curr := range c.expectedErrorSubstring {
if err != nil && !strings.Contains(err.Error(), curr) {
t.Errorf("Expected error containing: %v, but got %v", c.expectedErrorSubstring, err)
}
}
if !IsConfigurationInvalid(err) {
t.Errorf("all errors should be configuration invalid: %v", err)
}
}
} else {
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
}
}
func (c configValidationTest) testCluster(clusterName string, t *testing.T) {
errs := validateClusterInfo(clusterName, *c.config.Clusters[clusterName])
if len(c.expectedErrorSubstring) != 0 {
if len(errs) == 0 {
t.Errorf("Expected error containing: %v", c.expectedErrorSubstring)
}
for _, curr := range c.expectedErrorSubstring {
if len(errs) != 0 && !strings.Contains(utilerrors.NewAggregate(errs).Error(), curr) {
t.Errorf("Expected error containing: %v, but got %v", c.expectedErrorSubstring, utilerrors.NewAggregate(errs))
}
}
} else {
if len(errs) != 0 {
t.Errorf("Unexpected error: %v", utilerrors.NewAggregate(errs))
}
}
}
func (c configValidationTest) testAuthInfo(authInfoName string, t *testing.T) {
errs := validateAuthInfo(authInfoName, *c.config.AuthInfos[authInfoName])
if len(c.expectedErrorSubstring) != 0 {
if len(errs) == 0 {
t.Errorf("Expected error containing: %v", c.expectedErrorSubstring)
}
for _, curr := range c.expectedErrorSubstring {
if len(errs) != 0 && !strings.Contains(utilerrors.NewAggregate(errs).Error(), curr) {
t.Errorf("Expected error containing: %v, but got %v", c.expectedErrorSubstring, utilerrors.NewAggregate(errs))
}
}
} else {
if len(errs) != 0 {
t.Errorf("Unexpected error: %v", utilerrors.NewAggregate(errs))
}
}
}
| TestValidateCertFilesNotFoundAuthInfo |
Section.js | import React from "react"
import styled from "styled-components"
import Title from "./Title"
import { getBgColor } from "../theme/utils"
const padding = `
@media (max-width: 899px) {
padding: 1rem 2rem;
}
@media (min-width: 900px) {
padding: 4.375rem 5.25rem 3.375rem;
}
`
const Main = styled.div`
background: ${({ even, theme }) => getBgColor(theme.colors, false, even)};
flex: 70%;
${padding}
`
const MainWrapper = styled.div`
max-width: 1000px;
height: 100%;
`
const Column = styled.div`
@media (max-width: 899px) {
background: ${({ even, theme }) => getBgColor(theme.colors, false, even)};
}
@media (min-width: 900px) {
background: ${({ even, theme }) => getBgColor(theme.colors, true, even)};
text-align: right;
}
flex: 30%;
${padding}
`
const Wrapper = styled.div`
margin: 0 auto;
@media (min-width: 900px) {
display: flex;
height: ${({ full }) => (full ? "100vh" : "auto")}; | `
const SectionTitle = styled(Title)`
text-transform: uppercase;
letter-spacing: .15rem;
font-weight: lighter;
opacity: .5;
`
const getColumnNode = props => {
if (props.columnNode) {
return props.columnNode
}
return <SectionTitle level={2}>{props.title || ""}</SectionTitle>
}
const Section = ({ children, even, ...props }) => (
<Wrapper {...props}>
<Column even={even}>{getColumnNode(props)}</Column>
<Main even={even}>
<MainWrapper>{children}</MainWrapper>
</Main>
</Wrapper>
)
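// Usage sketch (illustrative):
// <Section title="About" even full>
//   <p>…content…</p>
// </Section>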
export default Section | } |
zeros.rs | //! Tests auto-converted from "sass-spec/spec/values/numbers/modulo/zeros.hrx"
#[allow(unused)]
fn | () -> crate::TestRunner {
super::runner()
}
#[test]
fn negative_negative() {
assert_eq!(
runner().ok("a {\
\n b: -0 % -1;\
\n}\n"),
"a {\
\n b: 0;\
\n}\n"
);
}
#[test]
fn negative_positive() {
assert_eq!(
runner().ok("a {\
\n b: -0 % 1;\
\n}\n"),
"a {\
\n b: 0;\
\n}\n"
);
}
#[test]
fn positive_negative() {
assert_eq!(
runner().ok("a {\
\n b: +0 % -1;\
\n}"),
"a {\
\n b: 0;\
\n}\n"
);
}
#[test]
fn positive_positive() {
assert_eq!(
runner().ok("a {\
\n b: +0 % +1;\
\n}\n"),
"a {\
\n b: 0;\
\n}\n"
);
}
#[test]
fn zero_divider() {
assert_eq!(
runner().ok("a {\
\n b: inspect(1 % 0);\
\n}\n"),
"a {\
\n b: NaN;\
\n}\n"
);
}
| runner |
list.rs | use crate::prelude::*;
use crate::types::pointer_tagging;
use std::{cmp, convert, fmt, iter, mem};
lazy_static! {
static ref LIST_TYPE_NAME: GcRef<Symbol> = { symbol_lookup::make_symbol(b"list") };
}
#[derive(Copy, Clone)]
pub enum List {
Nil,
Cons(GcRef<Cons>),
}
impl cmp::PartialEq for List {
fn eq(&self, other: &List) -> bool {
let mut first = *self;
let mut second = *other;
for (lhs, rhs) in (&mut first).zip(&mut second) {
if !lhs.equal(rhs) {
return false;
}
}
first.next().is_none() && second.next().is_none()
}
}
impl fmt::Display for List {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(c) = <GcRef<Cons>>::maybe_from(*self) {
write!(f, "{}", c)
} else {
write!(f, "()")
}
}
}
impl fmt::Debug for List {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(c) = <GcRef<Cons>>::maybe_from(*self) {
write!(f, "{:?}", c)
} else {
write!(f, "()")
}
}
}
impl List {
pub fn nil() -> List {
List::Nil
}
pub fn push(self, obj: Object) -> List {
let c = Cons::allocate(Cons::new(obj, self.into()));
unsafe { c.into_unchecked() }
}
pub fn reverse(self) -> List {
let mut new_list = List::Nil;
for el in self {
new_list = new_list.push(el);
}
new_list
}
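    /// Reverse the list in place by rewiring each cons cell's `cdr` pointer.
    /// Marked `unsafe` because it mutates cells through `GcRef` that may be
    /// shared elsewhere (an assumption based on the raw mutation below).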
pub unsafe fn | (mut self) -> List {
let mut prev = Object::nil();
loop {
match self {
List::Nil => {
return List::from_unchecked(prev);
}
List::Cons(c) => {
let mut copy = c;
let &mut Cons { ref mut cdr, .. } = copy.as_mut();
let next = mem::replace(cdr, prev);
prev = Object::from(c);
self = List::from_unchecked(next);
}
}
}
}
pub fn backwards_list_from<I>(iter: I) -> List
where
I: iter::IntoIterator<Item = Object>,
{
let mut head = Object::nil();
for el in iter {
head = Object::from(Cons::allocate(Cons::new(el, head)));
}
unsafe { List::from_unchecked(head) }
}
}
impl<O> iter::FromIterator<O> for List
where
Object: convert::From<O>,
{
fn from_iter<T: iter::IntoIterator<Item = O>>(iter: T) -> List {
let backwards = List::backwards_list_from(iter.into_iter().map(Object::from));
unsafe { backwards.nreverse() }
}
}
impl convert::From<List> for Object {
fn from(l: List) -> Object {
match l {
List::Nil => Object::nil(),
List::Cons(c) => Object::from(c),
}
}
}
impl iter::Iterator for List {
type Item = Object;
fn next(&mut self) -> Option<Object> {
match *self {
List::Nil => None,
List::Cons(c) => {
let Cons { car, cdr, .. } = *c;
let new: List = if cdr.nilp() {
List::Nil
} else {
List::Cons(unsafe { cdr.into_unchecked() })
};
*self = new;
Some(car)
}
}
}
}
impl MaybeFrom<GcRef<Cons>> for List {
fn maybe_from(c: GcRef<Cons>) -> Option<List> {
let Cons { cdr, .. } = *c;
let mut cur = cdr;
loop {
if let Some(c) = GcRef::<Cons>::maybe_from(cur) {
let Cons { cdr, .. } = *c;
cur = cdr;
} else if cur.nilp() {
break;
} else {
return None;
}
}
Some(List::Cons(c))
}
fn try_convert_from(c: GcRef<Cons>) -> Result<List, ConversionError> {
if let Some(l) = List::maybe_from(c) {
Ok(l)
} else {
Err(ConversionError::wanted(List::type_name()))
}
}
}
impl FromUnchecked<GcRef<Cons>> for List {
unsafe fn from_unchecked(c: GcRef<Cons>) -> List {
List::Cons(c)
}
}
impl MaybeFrom<Object> for List {
fn maybe_from(obj: Object) -> Option<List> {
if obj.nilp() {
Some(List::Nil)
} else {
let mut cur = obj;
loop {
if let Some(r) = GcRef::<Cons>::maybe_from(cur) {
let Cons { cdr, .. } = *r;
cur = cdr;
} else if cur.nilp() {
break;
} else {
return None;
}
}
Some(List::Cons(unsafe { GcRef::from_unchecked(obj) }))
}
}
fn try_convert_from(obj: Object) -> Result<List, ConversionError> {
if let Some(t) = List::maybe_from(obj) {
Ok(t)
} else {
Err(ConversionError::wanted(List::type_name()))
}
}
}
impl FromUnchecked<Object> for List {
unsafe fn from_unchecked(obj: Object) -> List {
if obj.nilp() {
List::Nil
} else {
List::Cons(GcRef::from_unchecked(obj))
}
}
}
impl FromObject for List {
type Tag = pointer_tagging::ObjectTag;
fn associated_tag() -> pointer_tagging::ObjectTag {
pointer_tagging::ObjectTag::Cons
}
fn type_name() -> GcRef<Symbol> {
*LIST_TYPE_NAME
}
}
impl MaybeFrom<List> for GcRef<Cons> {
fn maybe_from(l: List) -> Option<GcRef<Cons>> {
if let List::Cons(c) = l {
Some(c)
} else {
None
}
}
fn try_convert_from(obj: List) -> Result<GcRef<Cons>, ConversionError> {
if let Some(t) = <GcRef<Cons>>::maybe_from(obj) {
Ok(t)
} else {
Err(ConversionError::wanted(<GcRef<Cons>>::type_name()))
}
}
}
| nreverse |
exec_linux.go | package daemon // import "github.com/docker/docker/daemon"
import (
"context"
"github.com/docker/docker/container"
"github.com/docker/docker/daemon/exec"
"github.com/docker/docker/oci/caps"
"github.com/opencontainers/runc/libcontainer/apparmor"
"github.com/opencontainers/runtime-spec/specs-go"
)
func (daemon *Daemon) execSetPlatformOpt(c *container.Container, ec *exec.Config, p *specs.Process) error {
if len(ec.User) > 0 {
uid, gid, additionalGids, err := getUser(c, ec.User)
if err != nil {
return err
}
p.User = specs.User{
UID: uid,
GID: gid,
AdditionalGids: additionalGids,
}
}
if ec.Privileged {
if p.Capabilities == nil {
p.Capabilities = &specs.LinuxCapabilities{}
}
p.Capabilities.Bounding = caps.GetAllCapabilities() | p.Capabilities.Inheritable = p.Capabilities.Bounding
p.Capabilities.Effective = p.Capabilities.Bounding
}
if apparmor.IsEnabled() {
var appArmorProfile string
if c.AppArmorProfile != "" {
appArmorProfile = c.AppArmorProfile
} else if c.HostConfig.Privileged {
// `docker exec --privileged` does not currently disable AppArmor
// profiles. Privileged configuration of the container is inherited
appArmorProfile = "unconfined"
} else {
appArmorProfile = "docker-default"
}
if appArmorProfile == "docker-default" {
// Unattended upgrades and other fun services can unload AppArmor
// profiles inadvertently. Since we cannot store our profile in
// /etc/apparmor.d, nor can we practically add other ways of
// telling the system to keep our profile loaded, in order to make
// sure that we keep the default profile enabled we dynamically
// reload it if necessary.
if err := ensureDefaultAppArmorProfile(); err != nil {
return err
}
}
p.ApparmorProfile = appArmorProfile
}
s := &specs.Spec{Process: p}
return WithRlimits(daemon, c)(context.Background(), nil, nil, s)
} | p.Capabilities.Permitted = p.Capabilities.Bounding |
convert.go | // Package convert provides functions for converting to and from Lua structures
package convert
import (
"bytes"
"errors"
"fmt"
"io"
"strings"
log "github.com/sirupsen/logrus"
"github.com/xyproto/gluamapper"
"github.com/xyproto/gopher-lua"
"github.com/xyproto/jpath"
)
var (
errToMap = errors.New("could not represent Lua structure table as a map")
)
// PprintToWriter outputs more informative information than the memory location.
// Attempt to extract and print the values of the given lua.LValue.
// Does not add a newline at the end.
func PprintToWriter(w io.Writer, value lua.LValue) {
switch v := value.(type) {
case *lua.LTable:
t := (*lua.LTable)(v)
// Even if t.Len() is 0, the table may be full of elements
m, isAnArray, err := Table2interfaceMapGlua(t)
if err != nil {
//log.Info("try: for k,v in pairs(t) do pprint(k,v) end")
// Could not convert to a map
fmt.Fprint(w, v)
return
}
if isAnArray {
// A map which is really an array (arrays in Lua are maps)
var buf bytes.Buffer
buf.WriteString("{")
// Order the map
length := len(m)
for i := 1; i <= length; i++ {
val := m[float64(i)] // gluamapper uses float64 for all numbers
buf.WriteString(fmt.Sprintf("%#v", val))
if i != length {
// Output a comma for every element except the last one
buf.WriteString(", ")
}
}
buf.WriteString("}")
buf.WriteTo(w)
return
}
if len(m) == 0 {
// An empty map
fmt.Fprint(w, "{}")
return
}
// A go map, but with "interface{}" hidden
// TODO: Also hide double quotes, but only when they surround the keys in the map
		fmt.Fprint(w, strings.Replace(strings.Replace(fmt.Sprintf("%#v", m)[29:], ":[]interface {}", "=", -1), "\"", "", -1))
case *lua.LFunction:
if v.Proto != nil {
// Extended information about the function
fmt.Fprint(w, v.Proto)
} else {
fmt.Fprint(w, v)
}
case *lua.LUserData:
if jfile, ok := v.Value.(*jpath.JFile); ok {
fmt.Fprintln(w, v)
fmt.Fprintf(w, "filename: %s\n", jfile.GetFilename())
if data, err := jfile.JSON(); err == nil { // success
fmt.Fprintf(w, "JSON data:\n%s", string(data))
}
} else {
fmt.Fprint(w, v)
}
default:
fmt.Fprint(w, v)
}
}
// Arguments2buffer retrieves all the arguments given to a Lua function
// and gather the strings in a buffer.
func Arguments2buffer(L *lua.LState, addNewline bool) bytes.Buffer {
var buf bytes.Buffer
top := L.GetTop()
// Add all the string arguments to the buffer
for i := 1; i <= top; i++ {
buf.WriteString(L.Get(i).String())
if i != top {
buf.WriteString(" ")
}
}
if addNewline {
buf.WriteString("\n")
}
return buf
}
// Strings2table converts a string slice to a Lua table
func | (L *lua.LState, sl []string) *lua.LTable {
table := L.NewTable()
for _, element := range sl {
table.Append(lua.LString(element))
}
return table
}
// Map2table converts a map[string]string to a Lua table
func Map2table(L *lua.LState, m map[string]string) *lua.LTable {
table := L.NewTable()
for key, value := range m {
L.RawSet(table, lua.LString(key), lua.LString(value))
}
return table
}
// Table2map converts a Lua table to **one** of the following types, depending
// on the content:
// map[string]string
// map[string]int
// map[int]string
// map[int]int
// If no suitable keys and values are found, a nil interface is returned.
// The second return value is true if the table also contained entries of
// other key/value types than the ones in the returned map.
func Table2map(luaTable *lua.LTable, preferInt bool) (interface{}, bool) {
mapSS, mapSI, mapIS, mapII := Table2maps(luaTable)
lss := len(mapSS)
lsi := len(mapSI)
lis := len(mapIS)
lii := len(mapII)
total := lss + lsi + lis + lii
// Return the first map that has values
if !preferInt {
if lss > 0 {
//log.Println(key, "STRING -> STRING map")
return interface{}(mapSS), lss < total
} else if lsi > 0 {
//log.Println(key, "STRING -> INT map")
return interface{}(mapSI), lsi < total
} else if lis > 0 {
//log.Println(key, "INT -> STRING map")
return interface{}(mapIS), lis < total
} else if lii > 0 {
//log.Println(key, "INT -> INT map")
return interface{}(mapII), lii < total
}
} else {
if lii > 0 {
//log.Println(key, "INT -> INT map")
return interface{}(mapII), lii < total
} else if lis > 0 {
//log.Println(key, "INT -> STRING map")
return interface{}(mapIS), lis < total
} else if lsi > 0 {
//log.Println(key, "STRING -> INT map")
return interface{}(mapSI), lsi < total
} else if lss > 0 {
//log.Println(key, "STRING -> STRING map")
return interface{}(mapSS), lss < total
}
}
return nil, false
}
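// Usage sketch (illustrative, not from the original source):
//
//	L := lua.NewState()
//	defer L.Close()
//	if err := L.DoString(`t = {a = "x", b = "y"}`); err == nil {
//		if tbl, ok := L.GetGlobal("t").(*lua.LTable); ok {
//			m, mixed := Table2map(tbl, false)
//			fmt.Println(m, mixed) // map[a:x b:y] false
//		}
//	}
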
// Table2maps converts a Lua table to **all** of the following types,
// depending on the content:
// map[string]string
// map[string]int
// map[int]string
// map[int]int
func Table2maps(luaTable *lua.LTable) (map[string]string, map[string]int, map[int]string, map[int]int) {
// Initialize possible maps we want to convert to
mapSS := make(map[string]string)
mapSI := make(map[string]int)
mapIS := make(map[int]string)
mapII := make(map[int]int)
var skey, svalue lua.LString
var ikey, ivalue lua.LNumber
var hasSkey, hasIkey, hasSvalue, hasIvalue bool
luaTable.ForEach(func(tkey, tvalue lua.LValue) {
// Convert the keys and values to strings or ints
skey, hasSkey = tkey.(lua.LString)
ikey, hasIkey = tkey.(lua.LNumber)
svalue, hasSvalue = tvalue.(lua.LString)
ivalue, hasIvalue = tvalue.(lua.LNumber)
// Store the right keys and values in the right maps
if hasSkey && hasSvalue {
mapSS[skey.String()] = svalue.String()
} else if hasSkey && hasIvalue {
mapSI[skey.String()] = int(ivalue)
} else if hasIkey && hasSvalue {
mapIS[int(ikey)] = svalue.String()
} else if hasIkey && hasIvalue {
mapII[int(ikey)] = int(ivalue)
}
})
return mapSS, mapSI, mapIS, mapII
}
// Table2interfaceMap converts a Lua table to a map[string]interface{}
// If a value is itself a table, it too is converted (recursively) to a map[string]interface{}
func Table2interfaceMap(luaTable *lua.LTable) map[string]interface{} {
// Even if luaTable.Len() is 0, the table may be full of things
// Initialize possible maps we want to convert to
everything := make(map[string]interface{})
var skey, svalue lua.LString
var nkey, nvalue lua.LNumber
var hasSkey, hasSvalue, hasNkey, hasNvalue bool
luaTable.ForEach(func(tkey, tvalue lua.LValue) {
// Convert the keys and values to strings or ints or maps
skey, hasSkey = tkey.(lua.LString)
nkey, hasNkey = tkey.(lua.LNumber)
svalue, hasSvalue = tvalue.(lua.LString)
nvalue, hasNvalue = tvalue.(lua.LNumber)
secondTableValue, hasTvalue := tvalue.(*lua.LTable)
// Store the right keys and values in the right maps
if hasSkey && hasTvalue {
// Recursive call if the value is another table that can be converted to a string->interface{} map
everything[skey.String()] = Table2interfaceMap(secondTableValue)
} else if hasSkey && hasSvalue {
everything[skey.String()] = svalue.String()
} else if hasSkey && hasNvalue {
floatVal := float64(nvalue)
intVal := int(nvalue)
// Use the int value if it's the same as the float representation
if floatVal == float64(intVal) {
everything[skey.String()] = intVal
} else {
everything[skey.String()] = floatVal
}
} else if hasNkey && hasSvalue {
floatKey := float64(nkey)
intKey := int(nkey)
// Use the int key if it's the same as the float representation
if floatKey == float64(intKey) {
everything[fmt.Sprintf("%d", intKey)] = svalue.String()
} else {
everything[fmt.Sprintf("%f", floatKey)] = svalue.String()
}
} else if hasNkey && hasNvalue {
var sk, sv string
floatKey := float64(nkey)
intKey := int(nkey)
floatVal := float64(nvalue)
intVal := int(nvalue)
// Use the int key if it's the same as the float representation
if floatKey == float64(intKey) {
sk = fmt.Sprintf("%d", intKey)
} else {
sk = fmt.Sprintf("%f", floatKey)
}
// Use the int value if it's the same as the float representation
if floatVal == float64(intVal) {
sv = fmt.Sprintf("%d", intVal)
} else {
sv = fmt.Sprintf("%f", floatVal)
}
everything[sk] = sv
} else {
log.Warn("table2interfacemap: Unsupported type for map key. Value:", tvalue)
}
})
return everything
}
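// Hypothetical conversion sketch (not part of the original file): the Lua
// table {name = "bob", age = 42, tags = {a = "x"}} becomes
// map[string]interface{}{"name": "bob", "age": 42,
// "tags": map[string]interface{}{"a": "x"}}.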
// Table2interfaceMapGlua converts a Lua table to a map by using gluamapper.
// If the table really is an array (the numeric keys cover every index from 1
// up to the number of entries), the second return value is true.
func Table2interfaceMapGlua(luaTable *lua.LTable) (retmap map[interface{}]interface{}, isArray bool, err error) {
var (
m = make(map[interface{}]interface{})
opt = gluamapper.Option{}
indices []uint64
i, length uint64
)
// Catch a problem that may occur when converting the map value with gluamapper.ToGoValue
defer func() {
if r := recover(); r != nil {
retmap = m
err = errToMap // Could not represent Lua structure table as a map
return
}
}()
// Do the actual conversion
luaTable.ForEach(func(tkey, tvalue lua.LValue) {
if i, isNum := tkey.(lua.LNumber); isNum {
indices = append(indices, uint64(i))
}
// If tkey or tvalue is an LTable, give up
m[gluamapper.ToGoValue(tkey, opt)] = gluamapper.ToGoValue(tvalue, opt)
length++
})
// Report back as a map, not an array, if there are no elements
if length == 0 {
return m, false, nil
}
// Loop through every index that must be present in an array
isAnArray := true
for i = 1; i <= length; i++ {
// The map must have this index in order to be an array
hasIt := false
for _, val := range indices {
if val == i {
hasIt = true
break
}
}
if !hasIt {
isAnArray = false
break
}
}
return m, isAnArray, nil
}
| Strings2table |
MySpace.go | package main
import (
"github.com/xiaonanln/goworld"
"github.com/xiaonanln/goworld/engine/gwlog" |
// MySpace is the custom space type
type MySpace struct {
goworld.Space // Space type should always inherit from entity.Space
}
// OnGameReady is called when the game server is ready
func (space *MySpace) OnGameReady() {
gwlog.Infof("Game %d Is Ready", goworld.GetGameID())
} | ) |
endpoints.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package batch
import (
"context"
"errors"
"fmt"
"github.com/aws/aws-sdk-go-v2/aws"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
internalendpoints "github.com/aws/aws-sdk-go-v2/service/batch/internal/endpoints"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
"net/url"
"strings"
)
// EndpointResolverOptions is the service endpoint resolver options
type EndpointResolverOptions = internalendpoints.Options
// EndpointResolver interface for resolving service endpoints.
type EndpointResolver interface {
ResolveEndpoint(region string, options EndpointResolverOptions) (aws.Endpoint, error)
}
var _ EndpointResolver = &internalendpoints.Resolver{}
// NewDefaultEndpointResolver constructs a new service endpoint resolver
func NewDefaultEndpointResolver() *internalendpoints.Resolver {
return internalendpoints.New()
}
// EndpointResolverFunc is a helper utility that wraps a function so it satisfies
// the EndpointResolver interface. This is useful when you want to add additional
// endpoint resolving logic, or stub out specific endpoints with custom values.
type EndpointResolverFunc func(region string, options EndpointResolverOptions) (aws.Endpoint, error)
func (fn EndpointResolverFunc) ResolveEndpoint(region string, options EndpointResolverOptions) (endpoint aws.Endpoint, err error) {
return fn(region, options)
}
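// Hypothetical sketch (not part of the generated file): pointing every
// request at a local endpoint, e.g. for integration tests. Field names
// follow this package's Options type.
//
//	client := New(Options{
//		Region: "us-east-1",
//		EndpointResolver: EndpointResolverFunc(
//			func(region string, options EndpointResolverOptions) (aws.Endpoint, error) {
//				return aws.Endpoint{URL: "http://localhost:8000"}, nil
//			}),
//	})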
func resolveDefaultEndpointConfiguration(o *Options) {
if o.EndpointResolver != nil {
return
}
o.EndpointResolver = NewDefaultEndpointResolver()
}
// EndpointResolverFromURL returns an EndpointResolver configured using the
// provided endpoint url. By default, the resolved endpoint resolver uses the
// client region as signing region, and the endpoint source is set to
// EndpointSourceCustom. You can provide functional options to configure endpoint
// values for the resolved endpoint.
func EndpointResolverFromURL(url string, optFns ...func(*aws.Endpoint)) EndpointResolver |
type ResolveEndpoint struct {
Resolver EndpointResolver
Options EndpointResolverOptions
}
func (*ResolveEndpoint) ID() string {
return "ResolveEndpoint"
}
func (m *ResolveEndpoint) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
if m.Resolver == nil {
return out, metadata, fmt.Errorf("expected endpoint resolver to not be nil")
}
eo := m.Options
eo.Logger = middleware.GetLogger(ctx)
var endpoint aws.Endpoint
endpoint, err = m.Resolver.ResolveEndpoint(awsmiddleware.GetRegion(ctx), eo)
if err != nil {
return out, metadata, fmt.Errorf("failed to resolve service endpoint, %w", err)
}
req.URL, err = url.Parse(endpoint.URL)
if err != nil {
return out, metadata, fmt.Errorf("failed to parse endpoint URL: %w", err)
}
if len(awsmiddleware.GetSigningName(ctx)) == 0 {
signingName := endpoint.SigningName
if len(signingName) == 0 {
signingName = "batch"
}
ctx = awsmiddleware.SetSigningName(ctx, signingName)
}
ctx = awsmiddleware.SetEndpointSource(ctx, endpoint.Source)
ctx = smithyhttp.SetHostnameImmutable(ctx, endpoint.HostnameImmutable)
ctx = awsmiddleware.SetSigningRegion(ctx, endpoint.SigningRegion)
ctx = awsmiddleware.SetPartitionID(ctx, endpoint.PartitionID)
return next.HandleSerialize(ctx, in)
}
func addResolveEndpointMiddleware(stack *middleware.Stack, o Options) error {
return stack.Serialize.Insert(&ResolveEndpoint{
Resolver: o.EndpointResolver,
Options: o.EndpointOptions,
}, "OperationSerializer", middleware.Before)
}
func removeResolveEndpointMiddleware(stack *middleware.Stack) error {
_, err := stack.Serialize.Remove((&ResolveEndpoint{}).ID())
return err
}
type wrappedEndpointResolver struct {
awsResolver aws.EndpointResolverWithOptions
resolver EndpointResolver
}
func (w *wrappedEndpointResolver) ResolveEndpoint(region string, options EndpointResolverOptions) (endpoint aws.Endpoint, err error) {
if w.awsResolver == nil {
goto fallback
}
endpoint, err = w.awsResolver.ResolveEndpoint(ServiceID, region, options)
if err == nil {
return endpoint, nil
}
if nf := (&aws.EndpointNotFoundError{}); !errors.As(err, &nf) {
return endpoint, err
}
fallback:
if w.resolver == nil {
return endpoint, fmt.Errorf("default endpoint resolver provided was nil")
}
return w.resolver.ResolveEndpoint(region, options)
}
type awsEndpointResolverAdaptor func(service, region string) (aws.Endpoint, error)
func (a awsEndpointResolverAdaptor) ResolveEndpoint(service, region string, options ...interface{}) (aws.Endpoint, error) {
return a(service, region)
}
var _ aws.EndpointResolverWithOptions = awsEndpointResolverAdaptor(nil)
// withEndpointResolver returns an EndpointResolver that first delegates endpoint resolution to the awsResolver.
// If awsResolver returns an aws.EndpointNotFoundError, the resolver will use the provided
// fallbackResolver for resolution.
//
// fallbackResolver must not be nil
func withEndpointResolver(awsResolver aws.EndpointResolver, awsResolverWithOptions aws.EndpointResolverWithOptions, fallbackResolver EndpointResolver) EndpointResolver {
var resolver aws.EndpointResolverWithOptions
if awsResolverWithOptions != nil {
resolver = awsResolverWithOptions
} else if awsResolver != nil {
resolver = awsEndpointResolverAdaptor(awsResolver.ResolveEndpoint)
}
return &wrappedEndpointResolver{
awsResolver: resolver,
resolver: fallbackResolver,
}
}
func finalizeClientEndpointResolverOptions(options *Options) {
options.EndpointOptions.LogDeprecated = options.ClientLogMode.IsDeprecatedUsage()
if len(options.EndpointOptions.ResolvedRegion) == 0 {
const fipsInfix = "-fips-"
const fipsPrefix = "fips-"
const fipsSuffix = "-fips"
if strings.Contains(options.Region, fipsInfix) ||
strings.Contains(options.Region, fipsPrefix) ||
strings.Contains(options.Region, fipsSuffix) {
options.EndpointOptions.ResolvedRegion = strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(
options.Region, fipsInfix, "-"), fipsPrefix, ""), fipsSuffix, "")
options.EndpointOptions.UseFIPSEndpoint = aws.FIPSEndpointStateEnabled
}
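		// Worked example (illustrative): Region "fips-us-east-1" and
		// "us-east-1-fips" both resolve to ResolvedRegion "us-east-1"
		// with UseFIPSEndpoint enabled.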
}
}
| {
e := aws.Endpoint{URL: url, Source: aws.EndpointSourceCustom}
for _, fn := range optFns {
fn(&e)
}
return EndpointResolverFunc(
func(region string, options EndpointResolverOptions) (aws.Endpoint, error) {
if len(e.SigningRegion) == 0 {
e.SigningRegion = region
}
return e, nil
},
)
} |
tracker-service.test.js | 'use strict';
var expect = require('chai').expect;
var sinon = require('sinon');
var createAnnounceParams = require('./helpers/create-announce-params');
describe('TrackerService', function () {
var TrackerService = require('../release/tracker-service').default,
MemoryTorrentStore = require('../release/memory-torrent-store').default,
Torrent = require('../release/torrent').default,
AnnounceParamsValidator = require('../release/announce-params-validator').default,
Address = require('../release/address').default,
bufferpack = require('bufferpack'),
trackerService,
torrentStub,
memoryTorrentStoreStub,
params,
output,
ip1 = [244, 200, 100, 44],
ip2 = [192, 168, 0, 12],
ip3 = [11, 22, 33, 66],
port1 = 51413,
port2 = 50000,
port3 = 1,
peer1 = {peerId: 'peer1', ip: new Address(ip1.join('.')), port: port1},
peer2 = {peerId: 'peer2', ip: new Address(ip2.join('.')), port: port2},
peer3 = {peerId: 'peer3', ip: new Address(ip3.join('.')), port: port3},
peers = [peer1, peer2, peer3];
beforeEach(function () {
torrentStub = sinon.createStubInstance(Torrent);
torrentStub.getPeers = function () {
return peers;
};
torrentStub.getComplete = function () {
return 'complete';
};
torrentStub.getIncomplete = function () {
return 'incomplete';
};
memoryTorrentStoreStub = sinon.createStubInstance(MemoryTorrentStore);
memoryTorrentStoreStub.getTorrent = function () {
return torrentStub;
};
params = createAnnounceParams();
trackerService = new TrackerService();
trackerService.torrentStore = memoryTorrentStoreStub;
});
describe('#announce', function () {
function | () {
it('returns object', function () {
expect(output).to.be.an('object');
});
}
function returnsResponse(isCompact, noPeers) {
returnsObject();
describe('[peers]', function () {
if (isCompact) {
if (noPeers) {
it("return an empty buffer", function() {
expect(output.peers).to.be.an.instanceof(Buffer);
expect(output.peers.length).to.equal(0);
});
} else {
it('returns a buffer containing byte representations of IP address and port for each peer', function () {
var addr1num = Buffer.from(ip1).readInt32BE(0),
addr2num = Buffer.from(ip2).readInt32BE(0),
addr3num = Buffer.from(ip3).readInt32BE(0),
expected = bufferpack.pack('lHlHlH', [addr1num, port1, addr2num, port2, addr3num, port3]);
expect(output.peers).to.eql(expected);
});
}
} else {
if (noPeers) {
it('returns an empty array', function () {
expect(output.peers).to.eql([]);
});
} else {
it('returns an array with peer data', function () {
expect(output.peers).to.eql([
{
'peer id': 'peer1',
ip: '244.200.100.44',
port: port1
},
{
'peer id': 'peer2',
ip: '192.168.0.12',
port: port2
},
{
'peer id': 'peer3',
ip: '11.22.33.66',
port: port3
}
]);
});
}
}
});
describe('[complete]', function () {
it('contains a list of seeders', function () {
expect(output.complete).to.equal('complete');
});
});
describe('[incomplete]', function () {
it('contains a list of leechers', function () {
expect(output.incomplete).to.equal('incomplete');
});
});
describe('[interval]', function () {
it('contains an interval in seconds that the client should wait between sending regular requests to the tracker', function () {
expect(output.interval).to.equal(300);
});
});
}
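// Illustrative note (not part of the original suite): BEP 23 compact
// responses pack each peer into 6 bytes -- a 4-byte IPv4 address followed
// by a 2-byte port, both in network byte order -- which is what the
// 'lHlHlH' pack format above spells out for three peers.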
function returnsError(message) {
returnsObject();
describe('[failure reason]', function () {
it('contains an error message', function () {
expect(output['failure reason']).to.equal(message);
});
});
}
function behavesLikeEventNotSpecified() {
it('sets peer\'s data only once', function () {
expect(torrentStub.setPeer.callCount).to.equal(1);
});
['peerId', 'ip', 'port', 'left'].forEach(function (testParam) {
it('updates the peer\'s ' + testParam, function () {
var expObj = {};
expObj[testParam] = params[testParam];
expect(torrentStub.setPeer.getCall(0).args[0]).to.include(expObj);
});
});
it('saves updated torrent', function () {
expect(memoryTorrentStoreStub.saveTorrent.callCount).to.equal(1);
expect(memoryTorrentStoreStub.saveTorrent.calledAfter(torrentStub.setPeer)).to.be.true;
expect(memoryTorrentStoreStub.saveTorrent.calledWith(torrentStub)).to.be.true;
});
}
function whenEventStopped(isCompact, noPeers) {
describe("when `event` === stopped", function() {
if (noPeers) {
beforeEach(function () {
torrentStub.getPeers = function () {
return [];
};
});
}
beforeEach(function () {
params
.withCompact(isCompact)
.withEvent('stopped');
output = trackerService.announce(params);
});
returnsResponse(isCompact, noPeers);
it('unregisters the peer from the torrent', function () {
expect(torrentStub.removePeer.callCount).to.equal(1);
expect(torrentStub.removePeer.calledWith('myPeerId')).to.be.true;
});
it('saves updated torrent', function () {
expect(memoryTorrentStoreStub.saveTorrent.callCount).to.equal(1);
expect(memoryTorrentStoreStub.saveTorrent.calledAfter(torrentStub.removePeer)).to.be.true;
expect(memoryTorrentStoreStub.saveTorrent.calledWith(torrentStub)).to.be.true;
});
});
}
function whenEventNotSpecified(isCompact, noPeers) {
describe("when `event` is not specified", function () {
if (noPeers) {
beforeEach(function () {
torrentStub.getPeers = function () {
return [];
};
});
}
beforeEach(function () {
params
.withCompact(isCompact)
.withEvent(undefined);
output = trackerService.announce(params);
});
returnsResponse(isCompact, noPeers);
behavesLikeEventNotSpecified();
});
}
function whenEventStarted(isCompact, noPeers) {
describe("when `event` === started", function () {
if (noPeers) {
beforeEach(function () {
torrentStub.getPeers = function () {
return [];
};
});
}
beforeEach(function () {
params
.withCompact(isCompact)
.withEvent('started');
output = trackerService.announce(params);
});
returnsResponse(isCompact, noPeers);
behavesLikeEventNotSpecified();
});
}
function whenEventCompleted(isCompact, noPeers) {
describe("when `event` === completed", function () {
if (noPeers) {
beforeEach(function () {
torrentStub.getPeers = function () {
return [];
};
});
}
beforeEach(function () {
params
.withCompact(isCompact)
.withEvent('completed');
output = trackerService.announce(params);
});
returnsResponse(isCompact, noPeers);
behavesLikeEventNotSpecified();
});
}
beforeEach(function () {
params
.withPeerId('myPeerId')
.withIp('11.22.33.44')
.withPort(6666)
.withLeft(27);
});
describe("when parameters are valid", function() {
beforeEach(function () {
sinon.stub(AnnounceParamsValidator.prototype, 'validate');
});
afterEach(function () {
AnnounceParamsValidator.prototype.validate.restore();
});
describe('when a compact response is needed', function () {
var isCompact = true;
describe("when there is no peers", function() {
var noPeers = true;
whenEventStopped(isCompact, noPeers);
whenEventNotSpecified(isCompact, noPeers);
whenEventStarted(isCompact, noPeers);
whenEventCompleted(isCompact, noPeers);
});
describe("when there are some peers", function() {
var noPeers = false;
whenEventStopped(isCompact, noPeers);
whenEventNotSpecified(isCompact, noPeers);
whenEventStarted(isCompact, noPeers);
whenEventCompleted(isCompact, noPeers);
});
});
describe('when a compact response is not needed', function () {
var isCompact = false;
describe("when there is no peers", function() {
var noPeers = true;
whenEventStopped(isCompact, noPeers);
whenEventNotSpecified(isCompact, noPeers);
whenEventStarted(isCompact, noPeers);
whenEventCompleted(isCompact, noPeers);
});
describe("when there are some peers", function() {
var noPeers = false;
whenEventStopped(isCompact, noPeers);
whenEventNotSpecified(isCompact, noPeers);
whenEventStarted(isCompact, noPeers);
whenEventCompleted(isCompact, noPeers);
});
});
});
describe("when parameters are not valid", function() {
var message = 'my error message';
beforeEach(function() {
sinon.stub(AnnounceParamsValidator.prototype, 'validate', function() {
throw new Error(message);
});
output = trackerService.announce(params);
});
afterEach(function () {
AnnounceParamsValidator.prototype.validate.restore();
});
returnsError(message);
});
});
});
| returnsObject |
step.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Major workhorse of rustbuild, defining the stages of the compile and the
//! dependencies between them.
//!
//! The primary purpose of this module is to define the various `Step`s of
//! execution of the build. Each `Step` has a corresponding `Source` indicating
//! what it's actually doing along with a number of dependencies which must be
//! executed first.
//!
//! This module will take the CLI as input and calculate the steps required for
//! the build requested, ensuring that all intermediate pieces are in place.
//! Essentially this module is a `make`-replacement, but not as good.
use std::collections::HashSet;
use {Build, Compiler};
#[derive(Hash, Eq, PartialEq, Clone, Debug)]
pub struct Step<'a> {
pub src: Source<'a>,
pub target: &'a str,
}
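// Illustrative example (not in the original source): a request to build the
// stage-2 compiler for the build triple would be represented as
// `Step { src: Source::Rustc { stage: 2 }, target: "x86_64-unknown-linux-gnu" }`.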
/// Macro used to iterate over all targets that are recognized by the build
/// system.
///
/// Whenever a new step is added it will involve adding an entry here, updating
/// the dependencies section below, and then adding an implementation of the
/// step in `build/mod.rs`.
///
/// This macro takes another macro as an argument and then calls that macro with
/// all steps that the build system knows about.
macro_rules! targets {
($m:ident) => {
$m! {
// Step representing building the stageN compiler. This is just the
// compiler executable itself, not any of the support libraries
(rustc, Rustc { stage: u32 }),
// Steps for the two main cargo builds. These are parameterized over
// the compiler which is producing the artifact.
(libstd, Libstd { compiler: Compiler<'a> }),
(libtest, Libtest { compiler: Compiler<'a> }),
(librustc, Librustc { compiler: Compiler<'a> }),
// Links the target produced by the compiler provided into the
// host's directory also provided.
(libstd_link, LibstdLink {
compiler: Compiler<'a>,
host: &'a str
}),
(libtest_link, LibtestLink {
compiler: Compiler<'a>,
host: &'a str
}),
(librustc_link, LibrustcLink {
compiler: Compiler<'a>,
host: &'a str
}),
// Various tools that we can build as part of the build.
(tool_linkchecker, ToolLinkchecker { stage: u32 }),
(tool_rustbook, ToolRustbook { stage: u32 }),
(tool_error_index, ToolErrorIndex { stage: u32 }),
(tool_cargotest, ToolCargoTest { stage: u32 }),
(tool_tidy, ToolTidy { stage: u32 }),
(tool_compiletest, ToolCompiletest { stage: u32 }),
// Steps for long-running native builds. Ideally these wouldn't
// actually exist and would be part of build scripts, but for now
// these are here.
//
// There aren't really any parameters to this, but empty structs
// with braces are unstable so we just pick something that works.
(llvm, Llvm { _dummy: () }),
(compiler_rt, CompilerRt { _dummy: () }),
(test_helpers, TestHelpers { _dummy: () }),
(debugger_scripts, DebuggerScripts { stage: u32 }),
// Steps for various pieces of documentation that we can generate,
// the 'doc' step is just a pseudo target to depend on a bunch of
// others.
(doc, Doc { stage: u32 }),
(doc_book, DocBook { stage: u32 }),
(doc_nomicon, DocNomicon { stage: u32 }),
(doc_style, DocStyle { stage: u32 }),
(doc_standalone, DocStandalone { stage: u32 }),
(doc_std, DocStd { stage: u32 }),
(doc_test, DocTest { stage: u32 }),
(doc_rustc, DocRustc { stage: u32 }),
(doc_error_index, DocErrorIndex { stage: u32 }),
// Steps for running tests. The 'check' target is just a pseudo
// target to depend on a bunch of others.
(check, Check { stage: u32, compiler: Compiler<'a> }),
(check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
(check_linkcheck, CheckLinkcheck { stage: u32 }),
(check_cargotest, CheckCargoTest { stage: u32 }),
(check_tidy, CheckTidy { stage: u32 }),
(check_rpass, CheckRPass { compiler: Compiler<'a> }),
(check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
(check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
(check_rfail, CheckRFail { compiler: Compiler<'a> }),
(check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
(check_cfail, CheckCFail { compiler: Compiler<'a> }),
(check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
(check_pfail, CheckPFail { compiler: Compiler<'a> }),
(check_pretty, CheckPretty { compiler: Compiler<'a> }),
(check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
(check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
(check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
(check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
(check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
(check_codegen, CheckCodegen { compiler: Compiler<'a> }),
(check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
(check_incremental, CheckIncremental { compiler: Compiler<'a> }),
(check_ui, CheckUi { compiler: Compiler<'a> }),
(check_mir_opt, CheckMirOpt { compiler: Compiler<'a> }),
(check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
(check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
(check_docs, CheckDocs { compiler: Compiler<'a> }),
(check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
(check_rmake, CheckRMake { compiler: Compiler<'a> }),
(check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
(check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
(check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
// Distribution targets, creating tarballs
(dist, Dist { stage: u32 }),
(dist_docs, DistDocs { stage: u32 }),
(dist_mingw, DistMingw { _dummy: () }),
(dist_rustc, DistRustc { stage: u32 }),
(dist_std, DistStd { compiler: Compiler<'a> }),
// Misc targets
(android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
}
}
}
// Define the `Source` enum by iterating over all the steps and peeling out just
// the types that we want to define.
macro_rules! item { ($a:item) => ($a) }
macro_rules! define_source {
($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
item! {
#[derive(Hash, Eq, PartialEq, Clone, Debug)]
pub enum Source<'a> {
$($name { $($args)* }),*
}
}
}
}
targets!(define_source);
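// Sketch of (part of) what `targets!(define_source)` expands to:
//
//     #[derive(Hash, Eq, PartialEq, Clone, Debug)]
//     pub enum Source<'a> {
//         Rustc { stage: u32 },
//         Libstd { compiler: Compiler<'a> },
//         // ... one variant per step listed in `targets!` above
//     }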
/// Calculate a list of all steps described by `build`.
///
/// This will inspect the flags passed in on the command line and use that to
/// build up a list of steps to execute. These steps will then be transformed
/// into a topologically sorted list which when executed left-to-right will
/// correctly sequence the entire build.
pub fn all(build: &Build) -> Vec<Step> {
let mut ret = Vec::new();
let mut all = HashSet::new();
for target in top_level(build) {
fill(build, &target, &mut ret, &mut all);
}
return ret;
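    // Post-order depth-first traversal: a step is pushed onto `ret` only
    // after all of its dependencies have been pushed, so the resulting list
    // is topologically sorted.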
fn fill<'a>(build: &'a Build,
target: &Step<'a>,
ret: &mut Vec<Step<'a>>,
set: &mut HashSet<Step<'a>>) {
if set.insert(target.clone()) {
for dep in target.deps(build) {
fill(build, &dep, ret, set);
}
ret.push(target.clone());
}
}
}
/// Determines what top-level targets are requested as part of this build,
/// returning them as a list.
fn top_level(build: &Build) -> Vec<Step> {
let mut targets = Vec::new();
let stage = build.flags.stage.unwrap_or(2);
let host = Step {
src: Source::Llvm { _dummy: () },
target: build.flags.host.iter().next()
.unwrap_or(&build.config.build),
};
let target = Step {
src: Source::Llvm { _dummy: () },
target: build.flags.target.iter().next().map(|x| &x[..])
.unwrap_or(host.target)
};
// First, try to find steps on the command line.
add_steps(build, stage, &host, &target, &mut targets);
// If none are specified, then build everything.
if targets.len() == 0 {
let t = Step {
src: Source::Llvm { _dummy: () },
target: &build.config.build,
};
if build.config.docs {
targets.push(t.doc(stage));
}
for host in build.config.host.iter() {
if !build.flags.host.contains(host) {
continue
}
let host = t.target(host);
if host.target == build.config.build {
targets.push(host.librustc(host.compiler(stage)));
} else {
targets.push(host.librustc_link(t.compiler(stage), host.target));
}
for target in build.config.target.iter() {
if !build.flags.target.contains(target) {
continue
}
if host.target == build.config.build {
targets.push(host.target(target)
.libtest(host.compiler(stage)));
} else {
targets.push(host.target(target)
.libtest_link(t.compiler(stage), host.target));
}
}
}
}
return targets
}
fn add_steps<'a>(build: &'a Build,
stage: u32,
host: &Step<'a>,
target: &Step<'a>,
targets: &mut Vec<Step<'a>>) {
struct Context<'a> {
stage: u32,
compiler: Compiler<'a>,
_dummy: (),
host: &'a str,
}
for step in build.flags.step.iter() {
// The macro below insists on hygienic access to all local variables, so
// we shove them all in a struct and subvert hygiene by accessing struct
// fields instead.
let cx = Context {
stage: stage,
compiler: host.target(&build.config.build).compiler(stage),
_dummy: (),
host: host.target,
};
macro_rules! add_step {
($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
let name = stringify!($short).replace("_", "-");
if &step[..] == &name[..] {
targets.push(target.$short($(cx.$arg),*));
continue
}
drop(name);
)*})
}
targets!(add_step);
panic!("unknown step: {}", step);
}
}
macro_rules! constructors {
($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
fn $short(&self, $($arg: $t),*) -> Step<'a> {
Step {
src: Source::$name { $($arg: $arg),* },
target: self.target,
}
}
)*}
}
impl<'a> Step<'a> {
fn compiler(&self, stage: u32) -> Compiler<'a> {
Compiler::new(stage, self.target)
}
fn target(&self, target: &'a str) -> Step<'a> {
Step { target: target, src: self.src.clone() }
}
// Define ergonomic constructors for each step defined above so they can be
// easily constructed.
targets!(constructors);
/// Mapping of all dependencies for rustbuild.
///
/// This function receives a step, the build that we're building for, and
/// then returns a list of all the dependencies of that step.
pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
match self.src {
Source::Rustc { stage: 0 } => {
Vec::new()
}
Source::Rustc { stage } => {
let compiler = Compiler::new(stage - 1, &build.config.build);
vec![self.librustc(compiler)]
}
Source::Librustc { compiler } => {
vec![self.libtest(compiler), self.llvm(())]
}
Source::Libtest { compiler } => {
vec![self.libstd(compiler)]
}
Source::Libstd { compiler } => {
vec![self.compiler_rt(()),
self.rustc(compiler.stage).target(compiler.host)]
}
Source::LibrustcLink { compiler, host } => {
vec![self.librustc(compiler),
self.libtest_link(compiler, host)]
}
Source::LibtestLink { compiler, host } => {
vec![self.libtest(compiler), self.libstd_link(compiler, host)]
}
Source::LibstdLink { compiler, host } => {
vec![self.libstd(compiler),
self.target(host).rustc(compiler.stage)]
}
Source::CompilerRt { _dummy } => Vec::new(),
Source::Llvm { _dummy } => Vec::new(),
Source::TestHelpers { _dummy } => Vec::new(),
Source::DebuggerScripts { stage: _ } => Vec::new(),
// Note that all doc targets depend on artifacts from the build
// architecture, not the target (which is where we're generating
// docs into).
Source::DocStd { stage } => {
let compiler = self.target(&build.config.build).compiler(stage);
vec![self.libstd(compiler)]
}
Source::DocTest { stage } => {
let compiler = self.target(&build.config.build).compiler(stage);
vec![self.libtest(compiler)]
}
Source::DocBook { stage } |
Source::DocNomicon { stage } |
Source::DocStyle { stage } => {
vec![self.target(&build.config.build).tool_rustbook(stage)]
}
Source::DocErrorIndex { stage } => {
vec![self.target(&build.config.build).tool_error_index(stage)]
}
Source::DocStandalone { stage } => {
vec![self.target(&build.config.build).rustc(stage)]
}
Source::DocRustc { stage } => {
vec![self.doc_test(stage)]
}
Source::Doc { stage } => {
let mut deps = vec![
self.doc_book(stage), self.doc_nomicon(stage),
self.doc_style(stage), self.doc_standalone(stage),
self.doc_std(stage),
self.doc_error_index(stage),
];
if build.config.compiler_docs {
deps.push(self.doc_rustc(stage));
}
deps
}
Source::Check { stage, compiler } => {
// Check is just a pseudo step which means check all targets,
// so just depend on checking all targets.
build.config.target.iter().map(|t| {
self.target(t).check_target(stage, compiler)
}).collect()
}
Source::CheckTarget { stage, compiler } => {
// CheckTarget here means run all possible test suites for this
// target. Most of the time, however, we can't actually run
// anything if we're not the build triple as we could be cross
// compiling.
//
// As a result, the base set of targets here is quite stripped
// down from the standard set of targets. These suites have
// their own internal logic to run in cross-compiled situations
// if they'll run at all. For example compiletest knows that
// when testing Android targets we ship artifacts to the
// emulator.
//
// When in doubt the rule of thumb for adding to this list is
// "should this test suite run on the android bot?"
let mut base = vec![
self.check_rpass(compiler),
self.check_rfail(compiler),
self.check_crate_std(compiler),
self.check_crate_test(compiler),
self.check_debuginfo(compiler),
self.dist(stage),
];
// If we're testing the build triple, then we know we can
// actually run binaries and such, so we run all possible tests
// that we know about.
if self.target == build.config.build {
base.extend(vec![
// docs-related
self.check_docs(compiler),
self.check_error_index(compiler),
self.check_rustdoc(compiler),
// UI-related
self.check_cfail(compiler),
self.check_pfail(compiler),
self.check_ui(compiler),
// codegen-related
self.check_incremental(compiler),
self.check_codegen(compiler),
self.check_codegen_units(compiler),
// misc compiletest-test suites
self.check_rpass_full(compiler),
self.check_rfail_full(compiler),
self.check_cfail_full(compiler),
self.check_pretty_rpass_full(compiler),
self.check_pretty_rfail_full(compiler),
self.check_rpass_valgrind(compiler),
self.check_rmake(compiler),
self.check_mir_opt(compiler),
// crates
self.check_crate_rustc(compiler),
// pretty
self.check_pretty(compiler),
self.check_pretty_rpass(compiler),
self.check_pretty_rfail(compiler),
self.check_pretty_rpass_valgrind(compiler),
// misc
self.check_linkcheck(stage),
self.check_tidy(stage),
]);
}
return base
}
Source::CheckLinkcheck { stage } => {
vec![self.tool_linkchecker(stage), self.doc(stage)] | self.librustc(self.compiler(stage))]
}
Source::CheckTidy { stage } => {
vec![self.tool_tidy(stage)]
}
Source::CheckMirOpt { compiler } |
Source::CheckPrettyRPass { compiler } |
Source::CheckPrettyRFail { compiler } |
Source::CheckRFail { compiler } |
Source::CheckPFail { compiler } |
Source::CheckCodegen { compiler } |
Source::CheckCodegenUnits { compiler } |
Source::CheckIncremental { compiler } |
Source::CheckUi { compiler } |
Source::CheckRustdoc { compiler } |
Source::CheckPretty { compiler } |
Source::CheckCFail { compiler } |
Source::CheckRPassValgrind { compiler } |
Source::CheckRPass { compiler } => {
let mut base = vec![
self.libtest(compiler),
self.target(compiler.host).tool_compiletest(compiler.stage),
self.test_helpers(()),
];
if self.target.contains("android") {
base.push(self.android_copy_libs(compiler));
}
base
}
Source::CheckDebuginfo { compiler } => {
vec![
self.libtest(compiler),
self.target(compiler.host).tool_compiletest(compiler.stage),
self.test_helpers(()),
self.debugger_scripts(compiler.stage),
]
}
Source::CheckRPassFull { compiler } |
Source::CheckRFailFull { compiler } |
Source::CheckCFailFull { compiler } |
Source::CheckPrettyRPassFull { compiler } |
Source::CheckPrettyRFailFull { compiler } |
Source::CheckPrettyRPassValgrind { compiler } |
Source::CheckRMake { compiler } => {
vec![self.librustc(compiler),
self.target(compiler.host).tool_compiletest(compiler.stage)]
}
Source::CheckDocs { compiler } => {
vec![self.libstd(compiler)]
}
Source::CheckErrorIndex { compiler } => {
vec![self.libstd(compiler),
self.target(compiler.host).tool_error_index(compiler.stage)]
}
Source::CheckCrateStd { compiler } => {
vec![self.libtest(compiler)]
}
Source::CheckCrateTest { compiler } => {
vec![self.libtest(compiler)]
}
Source::CheckCrateRustc { compiler } => {
vec![self.libtest(compiler)]
}
Source::ToolLinkchecker { stage } |
Source::ToolTidy { stage } => {
vec![self.libstd(self.compiler(stage))]
}
Source::ToolErrorIndex { stage } |
Source::ToolRustbook { stage } => {
vec![self.librustc(self.compiler(stage))]
}
Source::ToolCargoTest { stage } => {
vec![self.libstd(self.compiler(stage))]
}
Source::ToolCompiletest { stage } => {
vec![self.libtest(self.compiler(stage))]
}
Source::DistDocs { stage } => vec![self.doc(stage)],
Source::DistMingw { _dummy: _ } => Vec::new(),
Source::DistRustc { stage } => {
vec![self.rustc(stage)]
}
Source::DistStd { compiler } => {
// We want to package up as many target libraries as possible
// for the `rust-std` package, so if this is a host target we
// depend on librustc and otherwise we just depend on libtest.
if build.config.host.iter().any(|t| t == self.target) {
vec![self.librustc(compiler)]
} else {
vec![self.libtest(compiler)]
}
}
Source::Dist { stage } => {
let mut base = Vec::new();
for host in build.config.host.iter() {
let host = self.target(host);
base.push(host.dist_rustc(stage));
if host.target.contains("windows-gnu") {
base.push(host.dist_mingw(()));
}
let compiler = self.compiler(stage);
for target in build.config.target.iter() {
let target = self.target(target);
if build.config.docs {
base.push(target.dist_docs(stage));
}
base.push(target.dist_std(compiler));
}
}
return base
}
Source::AndroidCopyLibs { compiler } => {
vec![self.libtest(compiler)]
}
}
}
} | }
Source::CheckCargoTest { stage } => {
vec![self.tool_cargotest(stage), |
switch_profile_port_model.py | # -*- coding: utf-8 -*-
"""
meraki_sdk
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class SwitchProfilePortModel(object):
"""Implementation of the 'SwitchProfilePort' model.
TODO: type model description here.
Attributes:
profile (string): Profile identifier.
port_id (string): Port identifier of switch port. For modules, the
identifier is "SlotNumber_ModuleType_PortNumber" (Ex:
“1_8X10G_1”), otherwise it is just the port number (Ex: "8").
"""
# Create a mapping from Model property names to API property names
_names = {
"profile":'profile',
"port_id":'portId'
}
def __init__(self,
profile=None,
port_id=None):
"""Constructor for the SwitchProfilePortModel class"""
# Initialize members of the class
self.profile = profile
self.port_id = port_id
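    # Illustrative usage (not part of the generated file):
    #   port = SwitchProfilePortModel.from_dictionary(
    #       {'profile': 'switch_profile_1', 'portId': '8'})
    #   assert port.port_id == '8'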
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
| profile = dictionary.get('profile')
port_id = dictionary.get('portId')
# Return an object of this model
return cls(profile,
port_id) |
# Extract variables from the dictionary
|
jni_generator.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts native methods from a Java file and generates the JNI bindings.
If you change this, please run and update the tests."""
import collections
import errno
import optparse
import os
import re
import string
from string import Template
import subprocess
import sys
import textwrap
import zipfile
class ParseError(Exception):
"""Exception thrown when we can't parse the input file."""
def __init__(self, description, *context_lines):
Exception.__init__(self)
self.description = description
self.context_lines = context_lines
def __str__(self):
context = '\n'.join(self.context_lines)
return '***\nERROR: %s\n\n%s\n***' % (self.description, context)
class Param(object):
"""Describes a param for a method, either java or native."""
def __init__(self, **kwargs):
self.datatype = kwargs['datatype']
self.name = kwargs['name']
class NativeMethod(object):
"""Describes a C/C++ method that is called by Java code"""
def __init__(self, **kwargs):
self.static = kwargs['static']
self.java_class_name = kwargs['java_class_name']
self.return_type = kwargs['return_type']
self.name = kwargs['name']
self.params = kwargs['params']
if self.params:
assert type(self.params) is list
assert type(self.params[0]) is Param
if (self.params and
self.params[0].datatype == 'int' and
self.params[0].name.startswith('native')):
self.type = 'method'
self.p0_type = self.params[0].name[len('native'):]
if kwargs.get('native_class_name'):
self.p0_type = kwargs['native_class_name']
else:
self.type = 'function'
self.method_id_var_name = kwargs.get('method_id_var_name', None)
class CalledByNative(object):
"""Describes a java method exported to c/c++"""
def __init__(self, **kwargs):
self.system_class = kwargs['system_class']
self.unchecked = kwargs['unchecked']
self.static = kwargs['static']
self.java_class_name = kwargs['java_class_name']
self.return_type = kwargs['return_type']
self.name = kwargs['name']
self.params = kwargs['params']
self.method_id_var_name = kwargs.get('method_id_var_name', None)
self.is_constructor = kwargs.get('is_constructor', False)
self.env_call = GetEnvCall(self.is_constructor, self.static,
self.return_type)
self.static_cast = GetStaticCastForReturnType(self.return_type)
def JavaDataTypeToC(java_type):
"""Returns a C datatype for the given java type."""
java_pod_type_map = {
'int': 'jint',
'byte': 'jbyte',
'char': 'jchar',
'short': 'jshort',
'boolean': 'jboolean',
'long': 'jlong',
'double': 'jdouble',
'float': 'jfloat',
}
java_type_map = {
'void': 'void',
'String': 'jstring',
'java/lang/String': 'jstring',
'Class': 'jclass',
'java/lang/Class': 'jclass',
}
if java_type in java_pod_type_map:
return java_pod_type_map[java_type]
elif java_type in java_type_map:
return java_type_map[java_type]
elif java_type.endswith('[]'):
if java_type[:-2] in java_pod_type_map:
return java_pod_type_map[java_type[:-2]] + 'Array'
return 'jobjectArray'
else:
return 'jobject'
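# Illustrative results (added for clarity; not in the original):
#   JavaDataTypeToC('int')     -> 'jint'
#   JavaDataTypeToC('String')  -> 'jstring'
#   JavaDataTypeToC('byte[]')  -> 'jbyteArray'
#   JavaDataTypeToC('Foo[]')   -> 'jobjectArray'
#   JavaDataTypeToC('Foo')     -> 'jobject'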
class JniParams(object):
_imports = []
_fully_qualified_class = ''
_package = ''
_inner_classes = []
@staticmethod
def SetFullyQualifiedClass(fully_qualified_class):
JniParams._fully_qualified_class = 'L' + fully_qualified_class
JniParams._package = '/'.join(fully_qualified_class.split('/')[:-1])
@staticmethod
def ExtractImportsAndInnerClasses(contents):
contents = contents.replace('\n', '')
re_import = re.compile(r'import.*?(?P<class>\S*?);')
for match in re.finditer(re_import, contents):
JniParams._imports += ['L' + match.group('class').replace('.', '/')]
re_inner = re.compile(r'(class|interface)\s+?(?P<name>\w+?)\W')
for match in re.finditer(re_inner, contents):
inner = match.group('name')
if not JniParams._fully_qualified_class.endswith(inner):
JniParams._inner_classes += [JniParams._fully_qualified_class + '$' +
inner]
@staticmethod
def JavaToJni(param):
"""Converts a java param into a JNI signature type."""
pod_param_map = {
'int': 'I',
'boolean': 'Z',
'char': 'C',
'short': 'S',
'long': 'J',
'double': 'D',
'float': 'F',
'byte': 'B',
'void': 'V',
}
object_param_list = [
'Ljava/lang/Boolean',
'Ljava/lang/Integer',
'Ljava/lang/Long',
'Ljava/lang/Object',
'Ljava/lang/String',
'Ljava/lang/Class',
]
prefix = ''
# Array?
while param[-2:] == '[]':
prefix += '['
param = param[:-2]
# Generic?
if '<' in param:
param = param[:param.index('<')]
if param in pod_param_map:
return prefix + pod_param_map[param]
if '/' in param:
# Coming from javap, use the fully qualified param directly.
return prefix + 'L' + param + ';'
for qualified_name in (object_param_list +
[JniParams._fully_qualified_class] +
JniParams._inner_classes):
if (qualified_name.endswith('/' + param) or
qualified_name.endswith('$' + param.replace('.', '$')) or
qualified_name == 'L' + param):
return prefix + qualified_name + ';'
# Is it from an import? (e.g. referencing Class from import pkg.Class;
# note that referencing an inner class Inner from import pkg.Class.Inner
# is not supported).
for qualified_name in JniParams._imports:
if qualified_name.endswith('/' + param):
# Ensure it's not an inner class.
components = qualified_name.split('/')
if len(components) > 2 and components[-2][0].isupper():
raise SyntaxError('Inner class (%s) can not be imported '
'and used by JNI (%s). Please import the outer '
'class and use Outer.Inner instead.' %
(qualified_name, param))
return prefix + qualified_name + ';'
# Is it an inner class from an outer class import? (e.g. referencing
# Class.Inner from import pkg.Class).
if '.' in param:
components = param.split('.')
outer = '/'.join(components[:-1])
inner = components[-1]
for qualified_name in JniParams._imports:
if qualified_name.endswith('/' + outer):
return prefix + qualified_name + '$' + inner + ';'
# Type not found, falling back to same package as this class.
return prefix + 'L' + JniParams._package + '/' + param + ';'
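  # Illustrative results (added for clarity; not in the original):
  #   JavaToJni('int')             -> 'I'
  #   JavaToJni('String[]')        -> '[Ljava/lang/String;'
  #   JavaToJni('java/lang/Class') -> 'Ljava/lang/Class;'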
@staticmethod
def Signature(params, returns, wrap):
"""Returns the JNI signature for the given datatypes."""
items = ['(']
items += [JniParams.JavaToJni(param.datatype) for param in params]
items += [')']
items += [JniParams.JavaToJni(returns)]
if wrap:
return '\n' + '\n'.join(['"' + item + '"' for item in items])
else:
return '"' + ''.join(items) + '"'
@staticmethod
def Parse(params):
"""Parses the params into a list of Param objects."""
if not params:
return []
ret = []
for p in [p.strip() for p in params.split(',')]:
items = p.split(' ')
if 'final' in items:
items.remove('final')
param = Param(
datatype=items[0],
name=(items[1] if len(items) > 1 else 'p%s' % len(ret)),
)
ret += [param]
return ret
def ExtractJNINamespace(contents):
re_jni_namespace = re.compile('.*?@JNINamespace\("(.*?)"\)')
m = re.findall(re_jni_namespace, contents)
if not m:
return ''
return m[0]
def ExtractFullyQualifiedJavaClassName(java_file_name, contents):
re_package = re.compile('.*?package (.*?);')
matches = re.findall(re_package, contents)
if not matches:
raise SyntaxError('Unable to find "package" line in %s' % java_file_name)
return (matches[0].replace('.', '/') + '/' +
os.path.splitext(os.path.basename(java_file_name))[0])
def ExtractNatives(contents):
"""Returns a list of dict containing information about a native method."""
contents = contents.replace('\n', '')
natives = []
re_native = re.compile(r'(@NativeClassQualifiedName'
'\(\"(?P<native_class_name>.*?)\"\))?\s*'
'(@NativeCall(\(\"(?P<java_class_name>.*?)\"\)))?\s*'
'(?P<qualifiers>\w+\s\w+|\w+|\s+)\s*?native '
'(?P<return_type>\S*?) '
'(?P<name>\w+?)\((?P<params>.*?)\);')
for match in re.finditer(re_native, contents):
native = NativeMethod(
static='static' in match.group('qualifiers'),
java_class_name=match.group('java_class_name'),
native_class_name=match.group('native_class_name'),
return_type=match.group('return_type'),
name=match.group('name').replace('native', ''),
params=JniParams.Parse(match.group('params')))
natives += [native]
return natives
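# Illustrative example (added for clarity; not in the original): a Java
# declaration such as
#   private static native int nativeInit(int nativePtr);
# yields a NativeMethod with static=True, return_type='int', name='Init',
# type='method' and p0_type='Ptr' (derived from the first parameter's name).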
def GetStaticCastForReturnType(return_type):
type_map = { 'String' : 'jstring',
'java/lang/String' : 'jstring',
'boolean[]': 'jbooleanArray',
'byte[]': 'jbyteArray',
'char[]': 'jcharArray',
'short[]': 'jshortArray',
'int[]': 'jintArray',
'long[]': 'jlongArray',
'double[]': 'jdoubleArray' }
ret = type_map.get(return_type, None)
if ret:
return ret
if return_type.endswith('[]'):
return 'jobjectArray'
return None
def GetEnvCall(is_constructor, is_static, return_type):
"""Maps the types availabe via env->Call__Method."""
if is_constructor:
return 'NewObject'
env_call_map = {'boolean': 'Boolean',
'byte': 'Byte',
'char': 'Char',
'short': 'Short',
'int': 'Int',
'long': 'Long',
'float': 'Float',
'void': 'Void',
'double': 'Double',
'Object': 'Object',
}
call = env_call_map.get(return_type, 'Object')
if is_static:
call = 'Static' + call
return 'Call' + call + 'Method'
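# Illustrative results (added for clarity; not in the original):
#   GetEnvCall(False, False, 'int')   -> 'CallIntMethod'
#   GetEnvCall(False, True, 'String') -> 'CallStaticObjectMethod'
#   GetEnvCall(True, False, 'Foo')    -> 'NewObject'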
def GetMangledParam(datatype):
"""Returns a mangled identifier for the datatype."""
if len(datatype) <= 2:
return datatype.replace('[', 'A')
ret = ''
for i in range(1, len(datatype)):
c = datatype[i]
if c == '[':
ret += 'A'
elif c.isupper() or datatype[i - 1] in ['/', 'L']:
ret += c.upper()
return ret
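# Illustrative results (added for clarity; not in the original):
#   GetMangledParam('I')                  -> 'I'
#   GetMangledParam('[I')                 -> 'AI'
#   GetMangledParam('Ljava/lang/String;') -> 'JLS'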
def GetMangledMethodName(name, params, return_type):
"""Returns a mangled method name for the given signature.
The returned name can be used as a C identifier and will be unique for all
valid overloads of the same method.
Args:
name: string.
params: list of Param.
return_type: string.
Returns:
A mangled name.
"""
mangled_items = []
for datatype in [return_type] + [x.datatype for x in params]:
mangled_items += [GetMangledParam(JniParams.JavaToJni(datatype))]
mangled_name = name + '_'.join(mangled_items)
assert re.match(r'[0-9a-zA-Z_]+', mangled_name)
return mangled_name
def MangleCalledByNatives(called_by_natives):
"""Mangles all the overloads from the call_by_natives list."""
method_counts = collections.defaultdict(
lambda: collections.defaultdict(lambda: 0))
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
name = called_by_native.name
method_counts[java_class_name][name] += 1
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
method_name = called_by_native.name
method_id_var_name = method_name
if method_counts[java_class_name][method_name] > 1:
method_id_var_name = GetMangledMethodName(method_name,
called_by_native.params,
called_by_native.return_type)
called_by_native.method_id_var_name = method_id_var_name
return called_by_natives
# Regex to match the JNI return types that should be included in a
# ScopedJavaLocalRef.
RE_SCOPED_JNI_RETURN_TYPES = re.compile('jobject|jclass|jstring|.*Array')
# Regex to match a string like "@CalledByNative public void foo(int bar)".
RE_CALLED_BY_NATIVE = re.compile(
'@CalledByNative(?P<Unchecked>(Unchecked)*?)(?:\("(?P<annotation>.*)"\))?'
'\s+(?P<prefix>[\w ]*?)'
'\s*(?P<return_type>\S+?)'
'\s+(?P<name>\w+)'
'\s*\((?P<params>[^\)]*)\)')
def ExtractCalledByNatives(contents):
"""Parses all methods annotated with @CalledByNative.
Args:
contents: the contents of the java file.
Returns:
A list of dict with information about the annotated methods.
TODO(bulach): return a CalledByNative object.
Raises:
ParseError: if unable to parse.
"""
called_by_natives = []
for match in re.finditer(RE_CALLED_BY_NATIVE, contents):
called_by_natives += [CalledByNative(
system_class=False,
unchecked='Unchecked' in match.group('Unchecked'),
static='static' in match.group('prefix'),
java_class_name=match.group('annotation') or '',
return_type=match.group('return_type'),
name=match.group('name'),
params=JniParams.Parse(match.group('params')))]
# Check for any @CalledByNative occurrences that weren't matched.
unmatched_lines = re.sub(RE_CALLED_BY_NATIVE, '', contents).split('\n')
for line1, line2 in zip(unmatched_lines, unmatched_lines[1:]):
if '@CalledByNative' in line1:
raise ParseError('could not parse @CalledByNative method signature',
line1, line2)
return MangleCalledByNatives(called_by_natives)
class JNIFromJavaP(object):
"""Uses 'javap' to parse a .class file and generate the JNI header file."""
def __init__(self, contents, namespace):
self.contents = contents
self.namespace = namespace
self.fully_qualified_class = re.match(
'.*?(class|interface) (?P<class_name>.*?)( |{)',
contents[1]).group('class_name')
self.fully_qualified_class = self.fully_qualified_class.replace('.', '/')
JniParams.SetFullyQualifiedClass(self.fully_qualified_class)
self.java_class_name = self.fully_qualified_class.split('/')[-1]
if not self.namespace:
self.namespace = 'JNI_' + self.java_class_name
re_method = re.compile('(?P<prefix>.*?)(?P<return_type>\S+?) (?P<name>\w+?)'
'\((?P<params>.*?)\)')
self.called_by_natives = []
for content in contents[2:]:
match = re.match(re_method, content)
if not match:
continue
self.called_by_natives += [CalledByNative(
system_class=True,
unchecked=False,
static='static' in match.group('prefix'),
java_class_name='',
return_type=match.group('return_type').replace('.', '/'),
name=match.group('name'),
params=JniParams.Parse(match.group('params').replace('.', '/')))]
re_constructor = re.compile('.*? public ' +
self.fully_qualified_class.replace('/', '.') +
'\((?P<params>.*?)\)')
for content in contents[2:]:
match = re.match(re_constructor, content)
if not match:
continue
self.called_by_natives += [CalledByNative(
system_class=True,
unchecked=False,
static=False,
java_class_name='',
return_type=self.fully_qualified_class,
name='Constructor',
params=JniParams.Parse(match.group('params').replace('.', '/')),
is_constructor=True)]
self.called_by_natives = MangleCalledByNatives(self.called_by_natives)
self.inl_header_file_generator = InlHeaderFileGenerator(
self.namespace, self.fully_qualified_class, [], self.called_by_natives)
def GetContent(self):
return self.inl_header_file_generator.GetContent()
@staticmethod
def CreateFromClass(class_file, namespace):
class_name = os.path.splitext(os.path.basename(class_file))[0]
p = subprocess.Popen(args=['javap', class_name],
cwd=os.path.dirname(class_file),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = p.communicate()
jni_from_javap = JNIFromJavaP(stdout.split('\n'), namespace)
return jni_from_javap
class JNIFromJavaSource(object):
"""Uses the given java source file to generate the JNI header file."""
def __init__(self, contents, fully_qualified_class):
contents = self._RemoveComments(contents)
JniParams.SetFullyQualifiedClass(fully_qualified_class)
JniParams.ExtractImportsAndInnerClasses(contents)
jni_namespace = ExtractJNINamespace(contents)
natives = ExtractNatives(contents)
called_by_natives = ExtractCalledByNatives(contents)
if len(natives) == 0 and len(called_by_natives) == 0:
raise SyntaxError('Unable to find any JNI methods for %s.' %
fully_qualified_class)
inl_header_file_generator = InlHeaderFileGenerator(
jni_namespace, fully_qualified_class, natives, called_by_natives)
self.content = inl_header_file_generator.GetContent()
def _RemoveComments(self, contents):
# We need to support both inline and block comments, and we need to handle
# strings that contain '//' or '/*'. Rather than trying to do all that with
# regexps, we just pipe the contents through the C preprocessor. We tell cpp
# the file has already been preprocessed, so it just removes comments and
# doesn't try to parse #include, #pragma etc.
#
# TODO(husky): This is a bit hacky. It would be cleaner to use a real Java
# parser. Maybe we could ditch JNIFromJavaSource and just always use
# JNIFromJavaP; or maybe we could rewrite this script in Java and use APT.
# http://code.google.com/p/chromium/issues/detail?id=138941
p = subprocess.Popen(args=['cpp', '-fpreprocessed'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = p.communicate(contents)
return stdout
def GetContent(self):
return self.content
@staticmethod
def CreateFromFile(java_file_name):
contents = file(java_file_name).read()
fully_qualified_class = ExtractFullyQualifiedJavaClassName(java_file_name,
contents)
return JNIFromJavaSource(contents, fully_qualified_class)
class InlHeaderFileGenerator(object):
"""Generates an inline header file for JNI integration."""
def __init__(self, namespace, fully_qualified_class, natives,
called_by_natives):
self.namespace = namespace
self.fully_qualified_class = fully_qualified_class
self.class_name = self.fully_qualified_class.split('/')[-1]
self.natives = natives
self.called_by_natives = called_by_natives
self.header_guard = fully_qualified_class.replace('/', '_') + '_JNI'
def GetContent(self):
"""Returns the content of the JNI binding file."""
template = Template("""\
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by
// ${SCRIPT_NAME}
// For
// ${FULLY_QUALIFIED_CLASS}
#ifndef ${HEADER_GUARD}
#define ${HEADER_GUARD}
#include <jni.h>
#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "base/basictypes.h"
#include "base/logging.h"
using base::android::ScopedJavaLocalRef;
// Step 1: forward declarations.
namespace {
$CLASS_PATH_DEFINITIONS
} // namespace
$OPEN_NAMESPACE
$FORWARD_DECLARATIONS
// Step 2: method stubs.
$METHOD_STUBS
// Step 3: RegisterNatives.
static bool RegisterNativesImpl(JNIEnv* env) {
$REGISTER_NATIVES_IMPL
return true;
}
$CLOSE_NAMESPACE
#endif // ${HEADER_GUARD}
""")
script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
base_index = script_components.index('base')
script_name = os.sep.join(script_components[base_index:])
values = {
'SCRIPT_NAME': script_name,
'FULLY_QUALIFIED_CLASS': self.fully_qualified_class,
'CLASS_PATH_DEFINITIONS': self.GetClassPathDefinitionsString(),
'FORWARD_DECLARATIONS': self.GetForwardDeclarationsString(),
'METHOD_STUBS': self.GetMethodStubsString(),
'OPEN_NAMESPACE': self.GetOpenNamespaceString(),
'REGISTER_NATIVES_IMPL': self.GetRegisterNativesImplString(),
'CLOSE_NAMESPACE': self.GetCloseNamespaceString(),
'HEADER_GUARD': self.header_guard,
}
return WrapOutput(template.substitute(values))
def GetClassPathDefinitionsString(self):
ret = []
ret += [self.GetClassPathDefinitions()]
return '\n'.join(ret)
def GetForwardDeclarationsString(self):
ret = []
for native in self.natives:
if native.type != 'method':
ret += [self.GetForwardDeclaration(native)]
return '\n'.join(ret)
def GetMethodStubsString(self):
ret = []
for native in self.natives:
if native.type == 'method':
ret += [self.GetNativeMethodStub(native)]
for called_by_native in self.called_by_natives:
ret += [self.GetCalledByNativeMethodStub(called_by_native)]
return '\n'.join(ret)
def GetKMethodsString(self, clazz):
ret = []
for native in self.natives:
if (native.java_class_name == clazz or
(not native.java_class_name and clazz == self.class_name)):
ret += [self.GetKMethodArrayEntry(native)]
return '\n'.join(ret)
def GetRegisterNativesImplString(self):
"""Returns the implementation for RegisterNatives."""
template = Template("""\
static const JNINativeMethod kMethods${JAVA_CLASS}[] = {
${KMETHODS}
};
const int kMethods${JAVA_CLASS}Size = arraysize(kMethods${JAVA_CLASS});
if (env->RegisterNatives(g_${JAVA_CLASS}_clazz,
kMethods${JAVA_CLASS},
kMethods${JAVA_CLASS}Size) < 0) {
LOG(ERROR) << "RegisterNatives failed in " << __FILE__;
return false;
}
""")
ret = [self.GetFindClasses()]
all_classes = self.GetUniqueClasses(self.natives)
all_classes[self.class_name] = self.fully_qualified_class
for clazz in all_classes:
kmethods = self.GetKMethodsString(clazz)
if kmethods:
values = {'JAVA_CLASS': clazz,
'KMETHODS': kmethods}
ret += [template.substitute(values)]
if not ret: return ''
return '\n' + '\n'.join(ret)
def GetOpenNamespaceString(self):
if self.namespace:
all_namespaces = ['namespace %s {' % ns
for ns in self.namespace.split('::')]
return '\n'.join(all_namespaces)
return ''
def GetCloseNamespaceString(self):
if self.namespace:
all_namespaces = ['} // namespace %s' % ns
for ns in self.namespace.split('::')]
all_namespaces.reverse()
return '\n'.join(all_namespaces) + '\n'
return ''
def GetJNIFirstParam(self, native):
ret = []
if native.type == 'method':
ret = ['jobject obj']
elif native.type == 'function':
if native.static:
ret = ['jclass clazz']
else:
ret = ['jobject obj']
return ret
def GetParamsInDeclaration(self, native):
"""Returns the params for the stub declaration.
Args:
native: the native dictionary describing the method.
Returns:
A string containing the params.
"""
return ',\n '.join(self.GetJNIFirstParam(native) +
[JavaDataTypeToC(param.datatype) + ' ' +
param.name
for param in native.params])
def GetCalledByNativeParamsInDeclaration(self, called_by_native):
return ',\n '.join([JavaDataTypeToC(param.datatype) + ' ' +
param.name
for param in called_by_native.params])
def GetForwardDeclaration(self, native):
template = Template("""
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS});
""")
values = {'RETURN': JavaDataTypeToC(native.return_type),
'NAME': native.name,
'PARAMS': self.GetParamsInDeclaration(native)}
return template.substitute(values)
def GetNativeMethodStub(self, native):
"""Returns stubs for native methods."""
template = Template("""\
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {
DCHECK(${PARAM0_NAME}) << "${NAME}";
${P0_TYPE}* native = reinterpret_cast<${P0_TYPE}*>(${PARAM0_NAME});
return native->${NAME}(env, obj${PARAMS_IN_CALL})${POST_CALL};
}
""")
params_for_call = ', '.join(p.name for p in native.params[1:])
if params_for_call:
params_for_call = ', ' + params_for_call
return_type = JavaDataTypeToC(native.return_type)
if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
scoped_return_type = 'ScopedJavaLocalRef<' + return_type + '>'
post_call = '.Release()'
else:
scoped_return_type = return_type
post_call = ''
values = {
'RETURN': return_type,
'SCOPED_RETURN': scoped_return_type,
'NAME': native.name,
'PARAMS_IN_DECLARATION': self.GetParamsInDeclaration(native),
'PARAM0_NAME': native.params[0].name,
'P0_TYPE': native.p0_type,
'PARAMS_IN_CALL': params_for_call,
'POST_CALL': post_call
}
return template.substitute(values)
def GetCalledByNativeMethodStub(self, called_by_native):
"""Returns a string."""
function_signature_template = Template("""\
static ${RETURN_TYPE} Java_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}(\
JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION})""")
function_header_template = Template("""\
${FUNCTION_SIGNATURE} {""")
function_header_with_unused_template = Template("""\
${FUNCTION_SIGNATURE} __attribute__ ((unused));
${FUNCTION_SIGNATURE} {""")
template = Template("""
static base::subtle::AtomicWord g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = 0;
${FUNCTION_HEADER}
/* Must call RegisterNativesImpl() */
DCHECK(g_${JAVA_CLASS}_clazz);
jmethodID method_id =
${GET_METHOD_ID_IMPL}
${RETURN_DECLARATION}
${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL},
method_id${PARAMS_IN_CALL})${POST_CALL};
${CHECK_EXCEPTION}
${RETURN_CLAUSE}
}""")
if called_by_native.static or called_by_native.is_constructor:
first_param_in_declaration = ''
first_param_in_call = ('g_%s_clazz' %
(called_by_native.java_class_name or
self.class_name))
else:
first_param_in_declaration = ', jobject obj'
first_param_in_call = 'obj'
params_in_declaration = self.GetCalledByNativeParamsInDeclaration(
called_by_native)
if params_in_declaration:
params_in_declaration = ', ' + params_in_declaration
params_for_call = ', '.join(param.name
for param in called_by_native.params)
if params_for_call:
params_for_call = ', ' + params_for_call
pre_call = ''
post_call = ''
if called_by_native.static_cast:
pre_call = 'static_cast<%s>(' % called_by_native.static_cast
post_call = ')'
check_exception = ''
if not called_by_native.unchecked:
check_exception = 'base::android::CheckException(env);'
return_type = JavaDataTypeToC(called_by_native.return_type)
return_declaration = ''
return_clause = ''
if return_type != 'void':
pre_call = ' ' + pre_call
return_declaration = return_type + ' ret ='
if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
return_type = 'ScopedJavaLocalRef<' + return_type + '>'
return_clause = 'return ' + return_type + '(env, ret);'
else:
return_clause = 'return ret;'
values = {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'METHOD': called_by_native.name,
'RETURN_TYPE': return_type,
'RETURN_DECLARATION': return_declaration,
'RETURN_CLAUSE': return_clause,
'FIRST_PARAM_IN_DECLARATION': first_param_in_declaration,
'PARAMS_IN_DECLARATION': params_in_declaration,
'STATIC': 'Static' if called_by_native.static else '',
'PRE_CALL': pre_call,
'POST_CALL': post_call,
'ENV_CALL': called_by_native.env_call,
'FIRST_PARAM_IN_CALL': first_param_in_call,
'PARAMS_IN_CALL': params_for_call,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
'CHECK_EXCEPTION': check_exception,
'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native)
}
values['FUNCTION_SIGNATURE'] = (
function_signature_template.substitute(values))
if called_by_native.system_class:
values['FUNCTION_HEADER'] = (
function_header_with_unused_template.substitute(values))
else:
values['FUNCTION_HEADER'] = function_header_template.substitute(values)
return template.substitute(values)
def GetKMethodArrayEntry(self, native):
template = Template("""\
{ "native${NAME}", ${JNI_SIGNATURE}, reinterpret_cast<void*>(${NAME}) },""")
values = {'NAME': native.name,
'JNI_SIGNATURE': JniParams.Signature(native.params,
native.return_type,
True)}
return template.substitute(values)
def GetUniqueClasses(self, origin):
ret = {self.class_name: self.fully_qualified_class}
for entry in origin:
class_name = self.class_name
jni_class_path = self.fully_qualified_class
if entry.java_class_name:
class_name = entry.java_class_name
jni_class_path = self.fully_qualified_class + '$' + class_name
ret[class_name] = jni_class_path
return ret
def GetClassPathDefinitions(self):
"""Returns the ClassPath constants."""
ret = []
template = Template("""\
const char k${JAVA_CLASS}ClassPath[] = "${JNI_CLASS_PATH}";""")
native_classes = self.GetUniqueClasses(self.natives)
called_by_native_classes = self.GetUniqueClasses(self.called_by_natives)
all_classes = native_classes
all_classes.update(called_by_native_classes)
for clazz in all_classes:
values = {
'JAVA_CLASS': clazz,
'JNI_CLASS_PATH': all_classes[clazz],
}
ret += [template.substitute(values)]
ret += ['']
for clazz in called_by_native_classes:
template = Template("""\
// Leaking this jclass as we cannot use LazyInstance from some threads.
jclass g_${JAVA_CLASS}_clazz = NULL;""")
values = {
'JAVA_CLASS': clazz,
}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetFindClasses(self):
"""Returns the imlementation of FindClass for all known classes."""
template = Template("""\
g_${JAVA_CLASS}_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
base::android::GetClass(env, k${JAVA_CLASS}ClassPath).obj()));""")
ret = []
for clazz in self.GetUniqueClasses(self.called_by_natives):
values = {'JAVA_CLASS': clazz}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetMethodIDImpl(self, called_by_native):
"""Returns the implementation of GetMethodID."""
template = Template("""\
base::android::MethodID::LazyGet<
base::android::MethodID::TYPE_${STATIC}>(
env, g_${JAVA_CLASS}_clazz,
"${JNI_NAME}",
${JNI_SIGNATURE},
&g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME});
""")
jni_name = called_by_native.name
jni_return_type = called_by_native.return_type
if called_by_native.is_constructor:
jni_name = '<init>'
jni_return_type = 'void'
values = {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'JNI_NAME': jni_name,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
'STATIC': 'STATIC' if called_by_native.static else 'INSTANCE',
'JNI_SIGNATURE': JniParams.Signature(called_by_native.params,
jni_return_type,
True)
}
return template.substitute(values)
def WrapOutput(output):
ret = []
for line in output.splitlines():
# Do not wrap lines under 80 characters or preprocessor directives.
if len(line) < 80 or line.lstrip()[:1] == '#':
stripped = line.rstrip()
if len(ret) == 0 or len(ret[-1]) or len(stripped):
ret.append(stripped)
else:
first_line_indent = ' ' * (len(line) - len(line.lstrip()))
subsequent_indent = first_line_indent + ' ' * 4
if line.startswith('//'):
subsequent_indent = '//' + subsequent_indent
wrapper = textwrap.TextWrapper(width=80,
subsequent_indent=subsequent_indent,
break_long_words=False)
ret += [wrapped.rstrip() for wrapped in wrapper.wrap(line)]
ret += ['']
return '\n'.join(ret)
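# Illustrative example (not part of the original script): a generated '//'
# comment line longer than 80 columns is wrapped at column 80; the
# continuation line keeps the original indent plus four spaces and is
# re-prefixed with '//', so the wrapped output remains a valid C++ comment.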
def ExtractJarInputFile(jar_file, input_file, out_dir):
"""Extracts input file from jar and returns the filename.
The input file is extracted to the same directory that the generated jni
headers will be placed in. This is passed as an argument to script.
Args:
jar_file: the jar file containing the input files to extract.
input_files: the list of files to extract from the jar file.
out_dir: the name of the directories to extract to.
Returns:
the name of extracted input file.
"""
jar_file = zipfile.ZipFile(jar_file)
out_dir = os.path.join(out_dir, os.path.dirname(input_file))
try:
os.makedirs(out_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
extracted_file_name = os.path.join(out_dir, os.path.basename(input_file))
with open(extracted_file_name, 'w') as outfile:
outfile.write(jar_file.read(input_file))
return extracted_file_name
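# Illustrative example (hypothetical paths): extracting
# 'org/chromium/example/Foo.class' from example.jar with out_dir 'out/gen'
# creates out/gen/org/chromium/example/Foo.class and returns that path.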
def GenerateJNIHeader(input_file, output_file, namespace, skip_if_same):
try:
if os.path.splitext(input_file)[1] == '.class':
jni_from_javap = JNIFromJavaP.CreateFromClass(input_file, namespace)
content = jni_from_javap.GetContent()
else:
jni_from_java_source = JNIFromJavaSource.CreateFromFile(input_file)
content = jni_from_java_source.GetContent()
except ParseError, e:
print e
sys.exit(1)
if output_file:
if not os.path.exists(os.path.dirname(os.path.abspath(output_file))):
os.makedirs(os.path.dirname(os.path.abspath(output_file)))
if skip_if_same and os.path.exists(output_file):
with file(output_file, 'r') as f:
existing_content = f.read()
if existing_content == content:
return
with file(output_file, 'w') as f:
f.write(content)
else:
print content
def main(argv):
usage = """usage: %prog [OPTIONS]
This script parses the given Java source code, extracts the native method
declarations and prints the generated header file to stdout (or to a file).
See SampleForTests.java for more details.
"""
option_parser = optparse.OptionParser(usage=usage)
option_parser.add_option('-j', dest='jar_file',
help='Extract the input file from the specified'
' jar file. Uses javap to extract the methods'
' from a pre-compiled class. --input_file'
' should point to a pre-compiled Java .class'
' file inside the jar.')
option_parser.add_option('-n', dest='namespace',
help='Use the given namespace in the generated'
' header instead of the javap class name.')
option_parser.add_option('--input_file',
help='Single input file name. The output file name '
'will be derived from it. Must be used with '
'--output_dir.')
option_parser.add_option('--output_dir',
help='The output directory. Must be used with '
'--input_file.')
option_parser.add_option('--optimize_generation', type="int",
default=0, help='Whether we should optimize JNI '
'generation by not regenerating files if they have '
'not changed.')
options, args = option_parser.parse_args(argv)
if options.jar_file:
input_file = ExtractJarInputFile(options.jar_file, options.input_file,
options.output_dir)
else:
input_file = options.input_file
output_file = None
if options.output_dir:
root_name = os.path.splitext(os.path.basename(input_file))[0]
output_file = os.path.join(options.output_dir, root_name) + '_jni.h'
GenerateJNIHeader(input_file, output_file, options.namespace,
options.optimize_generation)
if __name__ == '__main__':
sys.exit(main(sys.argv))
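# Example invocation (illustrative; paths and namespace are hypothetical):
#
#   python jni_generator.py \
#       --input_file org/chromium/example/Foo.java \
#       --output_dir out/gen \
#       -n example::jni
#
# This parses the native declarations in Foo.java and writes the inline
# binding header to out/gen/Foo_jni.h.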
| values['FUNCTION_HEADER'] = function_header_template.substitute(values) |
acl.rs | // This file is generated by rust-protobuf 2.23.0. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `acl.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_23_0;
#[derive(PartialEq,Clone,Default)]
pub struct FsPermissionProto {
// message fields
perm: ::std::option::Option<u32>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a FsPermissionProto {
fn default() -> &'a FsPermissionProto {
<FsPermissionProto as ::protobuf::Message>::default_instance()
}
}
impl FsPermissionProto {
pub fn new() -> FsPermissionProto {
::std::default::Default::default()
}
// required uint32 perm = 1;
pub fn get_perm(&self) -> u32 {
self.perm.unwrap_or(0)
}
pub fn clear_perm(&mut self) {
self.perm = ::std::option::Option::None;
}
pub fn has_perm(&self) -> bool {
self.perm.is_some()
}
// Param is passed by value, moved
pub fn set_perm(&mut self, v: u32) {
self.perm = ::std::option::Option::Some(v);
}
}
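// A minimal usage sketch (added for illustration; not emitted by protoc):
// round-tripping FsPermissionProto through the wire format, assuming the
// rust-protobuf 2.x runtime this file is generated against.
#[cfg(test)]
mod fs_permission_sketch {
use super::*;
use ::protobuf::Message;

#[test]
fn perm_round_trip() {
let mut perm = FsPermissionProto::new();
perm.set_perm(0o755);
// `perm` is the only required field, so the message is initialized.
assert!(perm.is_initialized());
let bytes = perm.write_to_bytes().unwrap();
let decoded = FsPermissionProto::parse_from_bytes(&bytes).unwrap();
assert_eq!(decoded.get_perm(), 0o755);
}
}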
impl ::protobuf::Message for FsPermissionProto {
fn is_initialized(&self) -> bool {
if self.perm.is_none() {
return false;
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.perm = ::std::option::Option::Some(tmp);
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(v) = self.perm {
my_size += ::protobuf::rt::value_size(1, v, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(v) = self.perm {
os.write_uint32(1, v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> FsPermissionProto {
FsPermissionProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_option_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"perm",
|m: &FsPermissionProto| { &m.perm },
|m: &mut FsPermissionProto| { &mut m.perm },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<FsPermissionProto>(
"FsPermissionProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static FsPermissionProto {
static instance: ::protobuf::rt::LazyV2<FsPermissionProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(FsPermissionProto::new)
}
}
impl ::protobuf::Clear for FsPermissionProto {
fn clear(&mut self) {
self.perm = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for FsPermissionProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for FsPermissionProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct AclEntryProto {
// message fields
field_type: ::std::option::Option<AclEntryProto_AclEntryTypeProto>,
scope: ::std::option::Option<AclEntryProto_AclEntryScopeProto>,
permissions: ::std::option::Option<AclEntryProto_FsActionProto>,
name: ::protobuf::SingularField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a AclEntryProto {
fn default() -> &'a AclEntryProto {
<AclEntryProto as ::protobuf::Message>::default_instance()
}
}
impl AclEntryProto {
pub fn new() -> AclEntryProto {
::std::default::Default::default()
}
// required .hadoop.hdfs.AclEntryProto.AclEntryTypeProto type = 1;
pub fn get_field_type(&self) -> AclEntryProto_AclEntryTypeProto {
self.field_type.unwrap_or(AclEntryProto_AclEntryTypeProto::USER)
}
pub fn clear_field_type(&mut self) {
self.field_type = ::std::option::Option::None;
}
pub fn has_field_type(&self) -> bool {
self.field_type.is_some()
}
// Param is passed by value, moved
pub fn set_field_type(&mut self, v: AclEntryProto_AclEntryTypeProto) {
self.field_type = ::std::option::Option::Some(v);
}
// required .hadoop.hdfs.AclEntryProto.AclEntryScopeProto scope = 2;
pub fn get_scope(&self) -> AclEntryProto_AclEntryScopeProto {
self.scope.unwrap_or(AclEntryProto_AclEntryScopeProto::ACCESS)
}
pub fn clear_scope(&mut self) {
self.scope = ::std::option::Option::None;
}
pub fn has_scope(&self) -> bool {
self.scope.is_some()
}
// Param is passed by value, moved
pub fn set_scope(&mut self, v: AclEntryProto_AclEntryScopeProto) {
self.scope = ::std::option::Option::Some(v);
}
// required .hadoop.hdfs.AclEntryProto.FsActionProto permissions = 3;
pub fn get_permissions(&self) -> AclEntryProto_FsActionProto {
self.permissions.unwrap_or(AclEntryProto_FsActionProto::NONE)
}
pub fn clear_permissions(&mut self) {
self.permissions = ::std::option::Option::None;
}
pub fn has_permissions(&self) -> bool {
self.permissions.is_some()
}
// Param is passed by value, moved
pub fn set_permissions(&mut self, v: AclEntryProto_FsActionProto) {
self.permissions = ::std::option::Option::Some(v);
}
// optional string name = 4;
pub fn get_name(&self) -> &str {
match self.name.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_name(&mut self) {
self.name.clear();
}
pub fn has_name(&self) -> bool {
self.name.is_some()
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
if self.name.is_none() {
self.name.set_default();
}
self.name.as_mut().unwrap()
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
self.name.take().unwrap_or_else(|| ::std::string::String::new())
}
}
impl ::protobuf::Message for AclEntryProto {
fn is_initialized(&self) -> bool {
if self.field_type.is_none() {
return false;
}
if self.scope.is_none() {
return false;
}
if self.permissions.is_none() {
return false;
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_proto2_enum_with_unknown_fields_into(wire_type, is, &mut self.field_type, 1, &mut self.unknown_fields)?
},
2 => {
::protobuf::rt::read_proto2_enum_with_unknown_fields_into(wire_type, is, &mut self.scope, 2, &mut self.unknown_fields)?
},
3 => {
::protobuf::rt::read_proto2_enum_with_unknown_fields_into(wire_type, is, &mut self.permissions, 3, &mut self.unknown_fields)?
},
4 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.name)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(v) = self.field_type {
my_size += ::protobuf::rt::enum_size(1, v);
}
if let Some(v) = self.scope {
my_size += ::protobuf::rt::enum_size(2, v);
}
if let Some(v) = self.permissions {
my_size += ::protobuf::rt::enum_size(3, v);
}
if let Some(ref v) = self.name.as_ref() {
my_size += ::protobuf::rt::string_size(4, &v);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(v) = self.field_type {
os.write_enum(1, ::protobuf::ProtobufEnum::value(&v))?;
}
if let Some(v) = self.scope {
os.write_enum(2, ::protobuf::ProtobufEnum::value(&v))?;
}
if let Some(v) = self.permissions {
os.write_enum(3, ::protobuf::ProtobufEnum::value(&v))?;
}
if let Some(ref v) = self.name.as_ref() {
os.write_string(4, &v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> AclEntryProto {
AclEntryProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_option_accessor::<_, ::protobuf::types::ProtobufTypeEnum<AclEntryProto_AclEntryTypeProto>>(
"type",
|m: &AclEntryProto| { &m.field_type },
|m: &mut AclEntryProto| { &mut m.field_type },
));
fields.push(::protobuf::reflect::accessor::make_option_accessor::<_, ::protobuf::types::ProtobufTypeEnum<AclEntryProto_AclEntryScopeProto>>(
"scope",
|m: &AclEntryProto| { &m.scope },
|m: &mut AclEntryProto| { &mut m.scope },
));
fields.push(::protobuf::reflect::accessor::make_option_accessor::<_, ::protobuf::types::ProtobufTypeEnum<AclEntryProto_FsActionProto>>(
"permissions",
|m: &AclEntryProto| { &m.permissions },
|m: &mut AclEntryProto| { &mut m.permissions },
));
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &AclEntryProto| { &m.name },
|m: &mut AclEntryProto| { &mut m.name },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<AclEntryProto>(
"AclEntryProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static AclEntryProto {
static instance: ::protobuf::rt::LazyV2<AclEntryProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(AclEntryProto::new)
}
}
impl ::protobuf::Clear for AclEntryProto {
fn clear(&mut self) {
self.field_type = ::std::option::Option::None;
self.scope = ::std::option::Option::None;
self.permissions = ::std::option::Option::None;
self.name.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for AclEntryProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for AclEntryProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum AclEntryProto_AclEntryScopeProto {
ACCESS = 0,
DEFAULT = 1,
}
impl ::protobuf::ProtobufEnum for AclEntryProto_AclEntryScopeProto {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<AclEntryProto_AclEntryScopeProto> {
match value {
0 => ::std::option::Option::Some(AclEntryProto_AclEntryScopeProto::ACCESS),
1 => ::std::option::Option::Some(AclEntryProto_AclEntryScopeProto::DEFAULT),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [AclEntryProto_AclEntryScopeProto] = &[
AclEntryProto_AclEntryScopeProto::ACCESS,
AclEntryProto_AclEntryScopeProto::DEFAULT,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<AclEntryProto_AclEntryScopeProto>("AclEntryProto.AclEntryScopeProto", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for AclEntryProto_AclEntryScopeProto {
}
impl ::std::default::Default for AclEntryProto_AclEntryScopeProto {
fn default() -> Self {
AclEntryProto_AclEntryScopeProto::ACCESS
}
}
impl ::protobuf::reflect::ProtobufValue for AclEntryProto_AclEntryScopeProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum AclEntryProto_AclEntryTypeProto {
USER = 0,
GROUP = 1,
MASK = 2,
OTHER = 3,
}
impl ::protobuf::ProtobufEnum for AclEntryProto_AclEntryTypeProto {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<AclEntryProto_AclEntryTypeProto> {
match value {
0 => ::std::option::Option::Some(AclEntryProto_AclEntryTypeProto::USER),
1 => ::std::option::Option::Some(AclEntryProto_AclEntryTypeProto::GROUP),
2 => ::std::option::Option::Some(AclEntryProto_AclEntryTypeProto::MASK),
3 => ::std::option::Option::Some(AclEntryProto_AclEntryTypeProto::OTHER),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [AclEntryProto_AclEntryTypeProto] = &[
AclEntryProto_AclEntryTypeProto::USER,
AclEntryProto_AclEntryTypeProto::GROUP,
AclEntryProto_AclEntryTypeProto::MASK,
AclEntryProto_AclEntryTypeProto::OTHER,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<AclEntryProto_AclEntryTypeProto>("AclEntryProto.AclEntryTypeProto", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for AclEntryProto_AclEntryTypeProto {
}
impl ::std::default::Default for AclEntryProto_AclEntryTypeProto {
fn default() -> Self {
AclEntryProto_AclEntryTypeProto::USER
}
}
impl ::protobuf::reflect::ProtobufValue for AclEntryProto_AclEntryTypeProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum AclEntryProto_FsActionProto {
NONE = 0,
EXECUTE = 1,
WRITE = 2,
WRITE_EXECUTE = 3,
READ = 4,
READ_EXECUTE = 5,
READ_WRITE = 6,
PERM_ALL = 7,
}
impl ::protobuf::ProtobufEnum for AclEntryProto_FsActionProto {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<AclEntryProto_FsActionProto> {
match value {
0 => ::std::option::Option::Some(AclEntryProto_FsActionProto::NONE),
1 => ::std::option::Option::Some(AclEntryProto_FsActionProto::EXECUTE),
2 => ::std::option::Option::Some(AclEntryProto_FsActionProto::WRITE),
3 => ::std::option::Option::Some(AclEntryProto_FsActionProto::WRITE_EXECUTE),
4 => ::std::option::Option::Some(AclEntryProto_FsActionProto::READ),
5 => ::std::option::Option::Some(AclEntryProto_FsActionProto::READ_EXECUTE),
6 => ::std::option::Option::Some(AclEntryProto_FsActionProto::READ_WRITE),
7 => ::std::option::Option::Some(AclEntryProto_FsActionProto::PERM_ALL),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [AclEntryProto_FsActionProto] = &[
AclEntryProto_FsActionProto::NONE,
AclEntryProto_FsActionProto::EXECUTE,
AclEntryProto_FsActionProto::WRITE,
AclEntryProto_FsActionProto::WRITE_EXECUTE,
AclEntryProto_FsActionProto::READ,
AclEntryProto_FsActionProto::READ_EXECUTE,
AclEntryProto_FsActionProto::READ_WRITE,
AclEntryProto_FsActionProto::PERM_ALL,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<AclEntryProto_FsActionProto>("AclEntryProto.FsActionProto", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for AclEntryProto_FsActionProto {
}
impl ::std::default::Default for AclEntryProto_FsActionProto {
fn default() -> Self {
AclEntryProto_FsActionProto::NONE
}
}
impl ::protobuf::reflect::ProtobufValue for AclEntryProto_FsActionProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
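// A minimal usage sketch (illustration only, not generated code; names are
// example values): building one ACL entry with the enum-typed accessors
// defined above.
#[cfg(test)]
mod acl_entry_sketch {
use super::*;
use ::protobuf::Message;

#[test]
fn entry_accessors() {
let mut entry = AclEntryProto::new();
// Unset enum fields report the proto2 defaults documented above.
assert_eq!(entry.get_field_type(), AclEntryProto_AclEntryTypeProto::USER);
entry.set_field_type(AclEntryProto_AclEntryTypeProto::GROUP);
entry.set_scope(AclEntryProto_AclEntryScopeProto::ACCESS);
entry.set_permissions(AclEntryProto_FsActionProto::READ_EXECUTE);
entry.set_name("staff".to_string());
assert_eq!(entry.get_name(), "staff");
// type, scope and permissions are all required and now present.
assert!(entry.is_initialized());
}
}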
#[derive(PartialEq,Clone,Default)]
pub struct AclStatusProto {
// message fields
owner: ::protobuf::SingularField<::std::string::String>,
group: ::protobuf::SingularField<::std::string::String>,
sticky: ::std::option::Option<bool>,
pub entries: ::protobuf::RepeatedField<AclEntryProto>,
pub permission: ::protobuf::SingularPtrField<FsPermissionProto>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a AclStatusProto {
fn default() -> &'a AclStatusProto {
<AclStatusProto as ::protobuf::Message>::default_instance()
}
}
impl AclStatusProto {
pub fn new() -> AclStatusProto {
::std::default::Default::default()
}
// required string owner = 1;
pub fn get_owner(&self) -> &str {
match self.owner.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_owner(&mut self) {
self.owner.clear();
}
pub fn has_owner(&self) -> bool {
self.owner.is_some()
}
// Param is passed by value, moved
pub fn set_owner(&mut self, v: ::std::string::String) {
self.owner = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_owner(&mut self) -> &mut ::std::string::String {
if self.owner.is_none() {
self.owner.set_default();
}
self.owner.as_mut().unwrap()
}
// Take field
pub fn take_owner(&mut self) -> ::std::string::String {
self.owner.take().unwrap_or_else(|| ::std::string::String::new())
}
// required string group = 2;
pub fn get_group(&self) -> &str {
match self.group.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_group(&mut self) {
self.group.clear();
}
pub fn has_group(&self) -> bool {
self.group.is_some()
}
// Param is passed by value, moved
pub fn set_group(&mut self, v: ::std::string::String) {
self.group = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_group(&mut self) -> &mut ::std::string::String {
if self.group.is_none() {
self.group.set_default();
}
self.group.as_mut().unwrap()
}
// Take field
pub fn take_group(&mut self) -> ::std::string::String {
self.group.take().unwrap_or_else(|| ::std::string::String::new())
}
// required bool sticky = 3;
pub fn get_sticky(&self) -> bool {
self.sticky.unwrap_or(false)
}
pub fn clear_sticky(&mut self) {
self.sticky = ::std::option::Option::None;
}
pub fn has_sticky(&self) -> bool {
self.sticky.is_some()
}
// Param is passed by value, moved
pub fn set_sticky(&mut self, v: bool) {
self.sticky = ::std::option::Option::Some(v);
}
// repeated .hadoop.hdfs.AclEntryProto entries = 4;
pub fn get_entries(&self) -> &[AclEntryProto] {
&self.entries
}
pub fn clear_entries(&mut self) {
self.entries.clear();
}
// Param is passed by value, moved
pub fn set_entries(&mut self, v: ::protobuf::RepeatedField<AclEntryProto>) {
self.entries = v;
}
// Mutable pointer to the field.
pub fn mut_entries(&mut self) -> &mut ::protobuf::RepeatedField<AclEntryProto> {
&mut self.entries
}
// Take field
pub fn take_entries(&mut self) -> ::protobuf::RepeatedField<AclEntryProto> {
::std::mem::replace(&mut self.entries, ::protobuf::RepeatedField::new())
}
// optional .hadoop.hdfs.FsPermissionProto permission = 5;
pub fn get_permission(&self) -> &FsPermissionProto {
self.permission.as_ref().unwrap_or_else(|| <FsPermissionProto as ::protobuf::Message>::default_instance())
}
pub fn clear_permission(&mut self) {
self.permission.clear();
}
pub fn has_permission(&self) -> bool {
self.permission.is_some()
}
// Param is passed by value, moved
pub fn set_permission(&mut self, v: FsPermissionProto) {
self.permission = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_permission(&mut self) -> &mut FsPermissionProto {
if self.permission.is_none() {
self.permission.set_default();
}
self.permission.as_mut().unwrap()
}
// Take field
pub fn take_permission(&mut self) -> FsPermissionProto {
self.permission.take().unwrap_or_else(|| FsPermissionProto::new())
}
}
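// A minimal usage sketch (illustration only, not generated code; owner and
// group are example values): filling the repeated `entries` field and the
// nested optional `permission` message.
#[cfg(test)]
mod acl_status_sketch {
use super::*;

#[test]
fn status_population() {
let mut status = AclStatusProto::new();
status.set_owner("hdfs".to_string());
status.set_group("supergroup".to_string());
status.set_sticky(false);
// RepeatedField exposes Vec-like push()/len().
status.mut_entries().push(AclEntryProto::new());
assert_eq!(status.get_entries().len(), 1);
// mut_permission() lazily initializes the nested message.
status.mut_permission().set_perm(0o750);
assert!(status.has_permission());
assert_eq!(status.get_permission().get_perm(), 0o750);
}
}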
impl ::protobuf::Message for AclStatusProto {
fn is_initialized(&self) -> bool {
if self.owner.is_none() {
return false;
}
if self.group.is_none() {
return false;
}
if self.sticky.is_none() {
return false;
}
for v in &self.entries {
if !v.is_initialized() {
return false;
}
};
for v in &self.permission {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.owner)?;
},
2 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.group)?;
},
3 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.sticky = ::std::option::Option::Some(tmp);
},
4 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.entries)?;
},
5 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.permission)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.owner.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
if let Some(ref v) = self.group.as_ref() {
my_size += ::protobuf::rt::string_size(2, &v);
}
if let Some(v) = self.sticky {
my_size += 2;
}
for value in &self.entries {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
if let Some(ref v) = self.permission.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.owner.as_ref() {
os.write_string(1, &v)?;
}
if let Some(ref v) = self.group.as_ref() {
os.write_string(2, &v)?;
}
if let Some(v) = self.sticky {
os.write_bool(3, v)?;
}
for v in &self.entries {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
if let Some(ref v) = self.permission.as_ref() {
os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> AclStatusProto {
AclStatusProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"owner",
|m: &AclStatusProto| { &m.owner },
|m: &mut AclStatusProto| { &mut m.owner },
));
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"group",
|m: &AclStatusProto| { &m.group },
|m: &mut AclStatusProto| { &mut m.group },
));
fields.push(::protobuf::reflect::accessor::make_option_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"sticky",
|m: &AclStatusProto| { &m.sticky },
|m: &mut AclStatusProto| { &mut m.sticky },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<AclEntryProto>>(
"entries",
|m: &AclStatusProto| { &m.entries },
|m: &mut AclStatusProto| { &mut m.entries },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<FsPermissionProto>>(
"permission",
|m: &AclStatusProto| { &m.permission },
|m: &mut AclStatusProto| { &mut m.permission },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<AclStatusProto>(
"AclStatusProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static AclStatusProto {
static instance: ::protobuf::rt::LazyV2<AclStatusProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(AclStatusProto::new)
}
}
impl ::protobuf::Clear for AclStatusProto {
fn clear(&mut self) {
self.owner.clear();
self.group.clear();
self.sticky = ::std::option::Option::None;
self.entries.clear();
self.permission.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for AclStatusProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for AclStatusProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ModifyAclEntriesRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
pub aclSpec: ::protobuf::RepeatedField<AclEntryProto>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ModifyAclEntriesRequestProto {
fn default() -> &'a ModifyAclEntriesRequestProto {
<ModifyAclEntriesRequestProto as ::protobuf::Message>::default_instance()
}
}
impl ModifyAclEntriesRequestProto {
pub fn new() -> ModifyAclEntriesRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
// repeated .hadoop.hdfs.AclEntryProto aclSpec = 2;
pub fn get_aclSpec(&self) -> &[AclEntryProto] {
&self.aclSpec
}
pub fn clear_aclSpec(&mut self) {
self.aclSpec.clear();
}
// Param is passed by value, moved
pub fn set_aclSpec(&mut self, v: ::protobuf::RepeatedField<AclEntryProto>) {
self.aclSpec = v;
}
// Mutable pointer to the field.
pub fn mut_aclSpec(&mut self) -> &mut ::protobuf::RepeatedField<AclEntryProto> {
&mut self.aclSpec
}
// Take field
pub fn take_aclSpec(&mut self) -> ::protobuf::RepeatedField<AclEntryProto> {
::std::mem::replace(&mut self.aclSpec, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for ModifyAclEntriesRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
for v in &self.aclSpec {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.aclSpec)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
for value in &self.aclSpec {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
for v in &self.aclSpec {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ModifyAclEntriesRequestProto {
ModifyAclEntriesRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &ModifyAclEntriesRequestProto| { &m.src },
|m: &mut ModifyAclEntriesRequestProto| { &mut m.src },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<AclEntryProto>>(
"aclSpec",
|m: &ModifyAclEntriesRequestProto| { &m.aclSpec },
|m: &mut ModifyAclEntriesRequestProto| { &mut m.aclSpec },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ModifyAclEntriesRequestProto>(
"ModifyAclEntriesRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ModifyAclEntriesRequestProto {
static instance: ::protobuf::rt::LazyV2<ModifyAclEntriesRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(ModifyAclEntriesRequestProto::new)
}
}
impl ::protobuf::Clear for ModifyAclEntriesRequestProto {
fn clear(&mut self) {
self.src.clear();
self.aclSpec.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ModifyAclEntriesRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ModifyAclEntriesRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
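// Sketch (illustrative; the path is an example value): a modify-ACL request
// carries a source path plus the ACL entries to merge; `aclSpec` reuses
// AclEntryProto from above.
#[cfg(test)]
mod modify_acl_sketch {
use super::*;

#[test]
fn request_fields() {
let mut req = ModifyAclEntriesRequestProto::new();
req.set_src("/user/hdfs/data".to_string());
req.mut_aclSpec().push(AclEntryProto::new());
assert!(req.has_src());
assert_eq!(req.get_aclSpec().len(), 1);
}
}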
#[derive(PartialEq,Clone,Default)]
pub struct ModifyAclEntriesResponseProto {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ModifyAclEntriesResponseProto {
fn default() -> &'a ModifyAclEntriesResponseProto {
<ModifyAclEntriesResponseProto as ::protobuf::Message>::default_instance()
}
}
impl ModifyAclEntriesResponseProto {
pub fn new() -> ModifyAclEntriesResponseProto {
::std::default::Default::default()
}
}
impl ::protobuf::Message for ModifyAclEntriesResponseProto {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ModifyAclEntriesResponseProto {
ModifyAclEntriesResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<ModifyAclEntriesResponseProto>(
"ModifyAclEntriesResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ModifyAclEntriesResponseProto {
static instance: ::protobuf::rt::LazyV2<ModifyAclEntriesResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(ModifyAclEntriesResponseProto::new)
}
}
impl ::protobuf::Clear for ModifyAclEntriesResponseProto {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ModifyAclEntriesResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ModifyAclEntriesResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveAclRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveAclRequestProto {
fn default() -> &'a RemoveAclRequestProto {
<RemoveAclRequestProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveAclRequestProto {
pub fn new() -> RemoveAclRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
}
impl ::protobuf::Message for RemoveAclRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveAclRequestProto {
RemoveAclRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &RemoveAclRequestProto| { &m.src },
|m: &mut RemoveAclRequestProto| { &mut m.src },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveAclRequestProto>(
"RemoveAclRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveAclRequestProto {
static instance: ::protobuf::rt::LazyV2<RemoveAclRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveAclRequestProto::new)
}
}
impl ::protobuf::Clear for RemoveAclRequestProto {
fn clear(&mut self) {
self.src.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveAclRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveAclRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveAclResponseProto {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveAclResponseProto {
fn default() -> &'a RemoveAclResponseProto {
<RemoveAclResponseProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveAclResponseProto {
pub fn new() -> RemoveAclResponseProto {
::std::default::Default::default()
}
}
impl ::protobuf::Message for RemoveAclResponseProto {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveAclResponseProto {
RemoveAclResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveAclResponseProto>(
"RemoveAclResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveAclResponseProto {
static instance: ::protobuf::rt::LazyV2<RemoveAclResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveAclResponseProto::new)
}
}
impl ::protobuf::Clear for RemoveAclResponseProto {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveAclResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveAclResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveAclEntriesRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
pub aclSpec: ::protobuf::RepeatedField<AclEntryProto>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveAclEntriesRequestProto {
fn default() -> &'a RemoveAclEntriesRequestProto {
<RemoveAclEntriesRequestProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveAclEntriesRequestProto {
pub fn new() -> RemoveAclEntriesRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
// repeated .hadoop.hdfs.AclEntryProto aclSpec = 2;
pub fn get_aclSpec(&self) -> &[AclEntryProto] {
&self.aclSpec
}
pub fn clear_aclSpec(&mut self) {
self.aclSpec.clear();
}
// Param is passed by value, moved
pub fn set_aclSpec(&mut self, v: ::protobuf::RepeatedField<AclEntryProto>) {
self.aclSpec = v;
}
// Mutable pointer to the field.
pub fn mut_aclSpec(&mut self) -> &mut ::protobuf::RepeatedField<AclEntryProto> {
&mut self.aclSpec
}
// Take field
pub fn take_aclSpec(&mut self) -> ::protobuf::RepeatedField<AclEntryProto> {
::std::mem::replace(&mut self.aclSpec, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for RemoveAclEntriesRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
for v in &self.aclSpec {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.aclSpec)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
for value in &self.aclSpec {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
for v in &self.aclSpec {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveAclEntriesRequestProto {
RemoveAclEntriesRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &RemoveAclEntriesRequestProto| { &m.src },
|m: &mut RemoveAclEntriesRequestProto| { &mut m.src },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<AclEntryProto>>(
"aclSpec",
|m: &RemoveAclEntriesRequestProto| { &m.aclSpec },
|m: &mut RemoveAclEntriesRequestProto| { &mut m.aclSpec },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveAclEntriesRequestProto>(
"RemoveAclEntriesRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveAclEntriesRequestProto {
static instance: ::protobuf::rt::LazyV2<RemoveAclEntriesRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveAclEntriesRequestProto::new)
}
}
impl ::protobuf::Clear for RemoveAclEntriesRequestProto {
fn clear(&mut self) {
self.src.clear();
self.aclSpec.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveAclEntriesRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveAclEntriesRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveAclEntriesResponseProto {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveAclEntriesResponseProto {
fn default() -> &'a RemoveAclEntriesResponseProto {
<RemoveAclEntriesResponseProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveAclEntriesResponseProto {
pub fn new() -> RemoveAclEntriesResponseProto {
::std::default::Default::default()
}
}
impl ::protobuf::Message for RemoveAclEntriesResponseProto {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveAclEntriesResponseProto {
RemoveAclEntriesResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveAclEntriesResponseProto>(
"RemoveAclEntriesResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveAclEntriesResponseProto {
static instance: ::protobuf::rt::LazyV2<RemoveAclEntriesResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveAclEntriesResponseProto::new)
}
}
impl ::protobuf::Clear for RemoveAclEntriesResponseProto {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveAclEntriesResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveAclEntriesResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveDefaultAclRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveDefaultAclRequestProto {
fn default() -> &'a RemoveDefaultAclRequestProto {
<RemoveDefaultAclRequestProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveDefaultAclRequestProto {
pub fn new() -> RemoveDefaultAclRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
}
impl ::protobuf::Message for RemoveDefaultAclRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveDefaultAclRequestProto {
RemoveDefaultAclRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &RemoveDefaultAclRequestProto| { &m.src },
|m: &mut RemoveDefaultAclRequestProto| { &mut m.src },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveDefaultAclRequestProto>(
"RemoveDefaultAclRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveDefaultAclRequestProto {
static instance: ::protobuf::rt::LazyV2<RemoveDefaultAclRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveDefaultAclRequestProto::new)
}
}
impl ::protobuf::Clear for RemoveDefaultAclRequestProto {
fn clear(&mut self) {
self.src.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveDefaultAclRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveDefaultAclRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RemoveDefaultAclResponseProto {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RemoveDefaultAclResponseProto {
fn default() -> &'a RemoveDefaultAclResponseProto {
<RemoveDefaultAclResponseProto as ::protobuf::Message>::default_instance()
}
}
impl RemoveDefaultAclResponseProto {
pub fn new() -> RemoveDefaultAclResponseProto {
::std::default::Default::default()
}
}
impl ::protobuf::Message for RemoveDefaultAclResponseProto {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RemoveDefaultAclResponseProto {
RemoveDefaultAclResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<RemoveDefaultAclResponseProto>(
"RemoveDefaultAclResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RemoveDefaultAclResponseProto {
static instance: ::protobuf::rt::LazyV2<RemoveDefaultAclResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(RemoveDefaultAclResponseProto::new)
}
}
impl ::protobuf::Clear for RemoveDefaultAclResponseProto {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RemoveDefaultAclResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RemoveDefaultAclResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SetAclRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
pub aclSpec: ::protobuf::RepeatedField<AclEntryProto>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SetAclRequestProto {
fn default() -> &'a SetAclRequestProto {
<SetAclRequestProto as ::protobuf::Message>::default_instance()
}
}
impl SetAclRequestProto {
pub fn new() -> SetAclRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
// repeated .hadoop.hdfs.AclEntryProto aclSpec = 2;
pub fn get_aclSpec(&self) -> &[AclEntryProto] {
&self.aclSpec
}
pub fn clear_aclSpec(&mut self) {
self.aclSpec.clear();
}
// Param is passed by value, moved
pub fn set_aclSpec(&mut self, v: ::protobuf::RepeatedField<AclEntryProto>) {
self.aclSpec = v;
}
// Mutable pointer to the field.
pub fn mut_aclSpec(&mut self) -> &mut ::protobuf::RepeatedField<AclEntryProto> {
&mut self.aclSpec
}
// Take field
pub fn take_aclSpec(&mut self) -> ::protobuf::RepeatedField<AclEntryProto> {
::std::mem::replace(&mut self.aclSpec, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for SetAclRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
for v in &self.aclSpec {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.aclSpec)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
for value in &self.aclSpec {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
for v in &self.aclSpec {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SetAclRequestProto {
SetAclRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &SetAclRequestProto| { &m.src },
|m: &mut SetAclRequestProto| { &mut m.src },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<AclEntryProto>>(
"aclSpec",
|m: &SetAclRequestProto| { &m.aclSpec },
|m: &mut SetAclRequestProto| { &mut m.aclSpec },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SetAclRequestProto>(
"SetAclRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SetAclRequestProto {
static instance: ::protobuf::rt::LazyV2<SetAclRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(SetAclRequestProto::new)
}
}
impl ::protobuf::Clear for SetAclRequestProto {
fn clear(&mut self) {
self.src.clear();
self.aclSpec.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SetAclRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SetAclRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SetAclResponseProto {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SetAclResponseProto {
fn default() -> &'a SetAclResponseProto {
<SetAclResponseProto as ::protobuf::Message>::default_instance()
}
}
impl SetAclResponseProto {
pub fn new() -> SetAclResponseProto {
::std::default::Default::default()
}
}
impl ::protobuf::Message for SetAclResponseProto {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SetAclResponseProto {
SetAclResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<SetAclResponseProto>(
"SetAclResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SetAclResponseProto {
static instance: ::protobuf::rt::LazyV2<SetAclResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(SetAclResponseProto::new)
}
}
impl ::protobuf::Clear for SetAclResponseProto {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SetAclResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SetAclResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct GetAclStatusRequestProto {
// message fields
src: ::protobuf::SingularField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a GetAclStatusRequestProto {
fn default() -> &'a GetAclStatusRequestProto {
<GetAclStatusRequestProto as ::protobuf::Message>::default_instance()
}
}
impl GetAclStatusRequestProto {
pub fn new() -> GetAclStatusRequestProto {
::std::default::Default::default()
}
// required string src = 1;
pub fn get_src(&self) -> &str {
match self.src.as_ref() {
Some(v) => &v,
None => "",
}
}
pub fn clear_src(&mut self) {
self.src.clear();
}
pub fn has_src(&self) -> bool {
self.src.is_some()
}
// Param is passed by value, moved
pub fn set_src(&mut self, v: ::std::string::String) {
self.src = ::protobuf::SingularField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_src(&mut self) -> &mut ::std::string::String {
if self.src.is_none() {
self.src.set_default();
}
self.src.as_mut().unwrap()
}
// Take field
pub fn take_src(&mut self) -> ::std::string::String {
self.src.take().unwrap_or_else(|| ::std::string::String::new())
}
}
impl ::protobuf::Message for GetAclStatusRequestProto {
fn is_initialized(&self) -> bool {
if self.src.is_none() {
return false;
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_string_into(wire_type, is, &mut self.src)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.src.as_ref() {
my_size += ::protobuf::rt::string_size(1, &v);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.src.as_ref() {
os.write_string(1, &v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> GetAclStatusRequestProto {
GetAclStatusRequestProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"src",
|m: &GetAclStatusRequestProto| { &m.src },
|m: &mut GetAclStatusRequestProto| { &mut m.src },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<GetAclStatusRequestProto>(
"GetAclStatusRequestProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static GetAclStatusRequestProto {
static instance: ::protobuf::rt::LazyV2<GetAclStatusRequestProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(GetAclStatusRequestProto::new)
}
}
impl ::protobuf::Clear for GetAclStatusRequestProto {
fn clear(&mut self) {
self.src.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for GetAclStatusRequestProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for GetAclStatusRequestProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct GetAclStatusResponseProto {
// message fields
pub result: ::protobuf::SingularPtrField<AclStatusProto>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a GetAclStatusResponseProto {
fn default() -> &'a GetAclStatusResponseProto {
<GetAclStatusResponseProto as ::protobuf::Message>::default_instance()
}
}
impl GetAclStatusResponseProto {
pub fn new() -> GetAclStatusResponseProto {
::std::default::Default::default()
}
// required .hadoop.hdfs.AclStatusProto result = 1;
pub fn get_result(&self) -> &AclStatusProto {
self.result.as_ref().unwrap_or_else(|| <AclStatusProto as ::protobuf::Message>::default_instance())
}
pub fn clear_result(&mut self) {
self.result.clear();
}
pub fn has_result(&self) -> bool {
self.result.is_some()
}
// Param is passed by value, moved
pub fn set_result(&mut self, v: AclStatusProto) {
self.result = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_result(&mut self) -> &mut AclStatusProto {
if self.result.is_none() {
self.result.set_default();
}
self.result.as_mut().unwrap()
}
// Take field
pub fn take_result(&mut self) -> AclStatusProto {
self.result.take().unwrap_or_else(|| AclStatusProto::new())
}
}
impl ::protobuf::Message for GetAclStatusResponseProto {
fn is_initialized(&self) -> bool {
if self.result.is_none() {
return false;
}
for v in &self.result {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.result)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.result.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.result.as_ref() {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> GetAclStatusResponseProto {
GetAclStatusResponseProto::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<AclStatusProto>>(
"result",
|m: &GetAclStatusResponseProto| { &m.result },
|m: &mut GetAclStatusResponseProto| { &mut m.result },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<GetAclStatusResponseProto>(
"GetAclStatusResponseProto",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static GetAclStatusResponseProto {
static instance: ::protobuf::rt::LazyV2<GetAclStatusResponseProto> = ::protobuf::rt::LazyV2::INIT;
instance.get(GetAclStatusResponseProto::new)
}
}
impl ::protobuf::Clear for GetAclStatusResponseProto {
fn clear(&mut self) {
self.result.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for GetAclStatusResponseProto {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for GetAclStatusResponseProto {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\tacl.proto\x12\x0bhadoop.hdfs\"'\n\x11FsPermissionProto\x12\x12\n\x04\
perm\x18\x01\x20\x02(\rR\x04perm\"\xe4\x03\n\rAclEntryProto\x12@\n\x04ty\
pe\x18\x01\x20\x02(\x0e2,.hadoop.hdfs.AclEntryProto.AclEntryTypeProtoR\
\x04type\x12C\n\x05scope\x18\x02\x20\x02(\x0e2-.hadoop.hdfs.AclEntryProt\
o.AclEntryScopeProtoR\x05scope\x12J\n\x0bpermissions\x18\x03\x20\x02(\
\x0e2(.hadoop.hdfs.AclEntryProto.FsActionProtoR\x0bpermissions\x12\x12\n\
\x04name\x18\x04\x20\x01(\tR\x04name\"-\n\x12AclEntryScopeProto\x12\n\n\
\x06ACCESS\x10\0\x12\x0b\n\x07DEFAULT\x10\x01\"=\n\x11AclEntryTypeProto\
\x12\x08\n\x04USER\x10\0\x12\t\n\x05GROUP\x10\x01\x12\x08\n\x04MASK\x10\
\x02\x12\t\n\x05OTHER\x10\x03\"~\n\rFsActionProto\x12\x08\n\x04NONE\x10\
\0\x12\x0b\n\x07EXECUTE\x10\x01\x12\t\n\x05WRITE\x10\x02\x12\x11\n\rWRIT\
E_EXECUTE\x10\x03\x12\x08\n\x04READ\x10\x04\x12\x10\n\x0cREAD_EXECUTE\
\x10\x05\x12\x0e\n\nREAD_WRITE\x10\x06\x12\x0c\n\x08PERM_ALL\x10\x07\"\
\xca\x01\n\x0eAclStatusProto\x12\x14\n\x05owner\x18\x01\x20\x02(\tR\x05o\
wner\x12\x14\n\x05group\x18\x02\x20\x02(\tR\x05group\x12\x16\n\x06sticky\
\x18\x03\x20\x02(\x08R\x06sticky\x124\n\x07entries\x18\x04\x20\x03(\x0b2\
\x1a.hadoop.hdfs.AclEntryProtoR\x07entries\x12>\n\npermission\x18\x05\
\x20\x01(\x0b2\x1e.hadoop.hdfs.FsPermissionProtoR\npermission\"f\n\x1cMo\
difyAclEntriesRequestProto\x12\x10\n\x03src\x18\x01\x20\x02(\tR\x03src\
\x124\n\x07aclSpec\x18\x02\x20\x03(\x0b2\x1a.hadoop.hdfs.AclEntryProtoR\
\x07aclSpec\"\x1f\n\x1dModifyAclEntriesResponseProto\")\n\x15RemoveAclRe\
questProto\x12\x10\n\x03src\x18\x01\x20\x02(\tR\x03src\"\x18\n\x16Remove\
AclResponseProto\"f\n\x1cRemoveAclEntriesRequestProto\x12\x10\n\x03src\
\x18\x01\x20\x02(\tR\x03src\x124\n\x07aclSpec\x18\x02\x20\x03(\x0b2\x1a.\
hadoop.hdfs.AclEntryProtoR\x07aclSpec\"\x1f\n\x1dRemoveAclEntriesRespons\
eProto\"0\n\x1cRemoveDefaultAclRequestProto\x12\x10\n\x03src\x18\x01\x20\
\x02(\tR\x03src\"\x1f\n\x1dRemoveDefaultAclResponseProto\"\\\n\x12SetAcl\
RequestProto\x12\x10\n\x03src\x18\x01\x20\x02(\tR\x03src\x124\n\x07aclSp\
ec\x18\x02\x20\x03(\x0b2\x1a.hadoop.hdfs.AclEntryProtoR\x07aclSpec\"\x15\
\n\x13SetAclResponseProto\",\n\x18GetAclStatusRequestProto\x12\x10\n\x03\
src\x18\x01\x20\x02(\tR\x03src\"P\n\x19GetAclStatusResponseProto\x123\n\
\x06result\x18\x01\x20\x02(\x0b2\x1b.hadoop.hdfs.AclStatusProtoR\x06resu\
ltB5\n%org.apache.hadoop.hdfs.protocol.protoB\tAclProtos\xa0\x01\x01\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
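// --- Illustrative only: a minimal round-trip sketch for the generated ACL
// messages above. It uses the same rust-protobuf 2.x `Message` API that the
// generated code itself relies on (see `parse_descriptor_proto`); the path
// "/user/jack" is a made-up value.
#[cfg(test)]
mod acl_proto_roundtrip_sketch {
    use super::*;
    use ::protobuf::Message;

    #[test]
    fn set_acl_request_roundtrip() {
        let mut req = SetAclRequestProto::new();
        req.set_src("/user/jack".to_string());
        // `src` is the only required field, so the message is now initialized.
        assert!(req.is_initialized());
        let bytes = req.write_to_bytes().unwrap();
        let parsed = SetAclRequestProto::parse_from_bytes(&bytes).unwrap();
        assert_eq!(parsed.get_src(), "/user/jack");
    }
}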
| {
self.src = ::protobuf::SingularField::some(v);
} |
instance_in_model.py | from models import User
| user = User(name='jack')
# Membership test assumes the User model's metaclass defines __contains__.
if user in User:
    print('Someone in the table is named jack') |
TitleBar.tsx | import React from 'react';
import { RaceDistance } from '../defy/models';
import styled from 'styled-components';
import { humanizeDuration } from '../defy/models'
interface Props {
distance: RaceDistance,
time: number,
}
const H2 = styled.h2`
width: 100%;
text-align: center;
`
const TitleBar: React.FC<Props> = ({ distance, time }) => {
return (
<H2>{distance.name} in {humanizeDuration(time, { leadingZeroes: false, padHours: false, padMinutes: true, padSeconds: true })}</H2>
) | }
export default TitleBar |
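// Illustrative usage sketch (not from the source): RaceDistance is assumed to
// expose at least `name`, and `time` is assumed to be in seconds.
//
//   const tenK = { name: '10K' } as RaceDistance;     // hypothetical value
//   <TitleBar distance={tenK} time={50 * 60 + 30} />  // e.g. "10K in 50:30"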
|
node_fs.ts | import file = require('./file');
import api_error = require('./api_error');
import file_system = require('./file_system');
import file_flag = require('./file_flag');
import buffer = require('./buffer');
import node_path = require('./node_path');
import node_fs_stats = require('./node_fs_stats');
var ApiError = api_error.ApiError;
var ErrorCode = api_error.ErrorCode;
var FileFlag = file_flag.FileFlag;
var Buffer = buffer.Buffer;
var path = node_path.path;
declare var __numWaiting: number;
declare var setImmediate: (cb: Function) => void;
/**
* Wraps a callback with a setImmediate call.
* @param [Function] cb The callback to wrap.
* @param [Number] numArgs The number of arguments that the callback takes.
* @return [Function] The wrapped callback.
*/
function wrapCb(cb: Function, numArgs: number): Function {
if (typeof cb !== 'function') {
throw new ApiError(ErrorCode.EINVAL, 'Callback must be a function.');
}
// @todo This is used for unit testing. Maybe we should inject this logic
// dynamically rather than bundle it in 'production' code.
if (typeof __numWaiting === 'undefined') {
__numWaiting = 0;
}
__numWaiting++;
// We could use `arguments`, but Function.call/apply is expensive. And we only
// need to handle 1-3 arguments
switch (numArgs) {
case 1:
return function(arg1) {
setImmediate(function() {
__numWaiting--;
return cb(arg1);
});
};
case 2:
return function(arg1, arg2) {
setImmediate(function() {
__numWaiting--;
return cb(arg1, arg2);
});
};
case 3:
return function(arg1, arg2, arg3) {
setImmediate(function() {
__numWaiting--;
return cb(arg1, arg2, arg3);
});
};
default:
throw new Error('Invalid invocation of wrapCb.');
}
}
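// Illustrative only (hypothetical callback): the wrapper defers the callback
// to a fresh macrotask, so a backend that completes synchronously cannot keep
// growing the caller's stack:
//
//   var wrapped = wrapCb(function (err) { console.log('done:', err); }, 1);
//   wrapped(null); // logs 'done: null' asynchronously, on the next tick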
/**
* Checks if the fd is valid.
* @param [BrowserFS.File] fd A file descriptor (in BrowserFS, it's a File object)
* @return [Boolean, BrowserFS.ApiError] Returns `true` if the FD is OK,
* otherwise returns an ApiError.
*/
function checkFd(fd: file.File): void {
if (typeof fd['write'] !== 'function') {
throw new ApiError(ErrorCode.EBADF, 'Invalid file descriptor.');
}
}
function normalizeMode(mode: any, def: number): number {
switch(typeof mode) {
case 'number':
// (path, flag, mode, cb?)
return mode;
case 'string':
// (path, flag, modeString, cb?)
      var trueMode = parseInt(mode, 8);
      // NaN !== NaN in JavaScript, so the original `trueMode !== NaN` check
      // was always true; use isNaN to detect a failed octal parse.
      if (!isNaN(trueMode)) {
        return trueMode;
      }
// FALL THROUGH if mode is an invalid string!
default:
return def;
}
}
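// For reference (values are illustrative): normalizeMode(0x1a4, 0) === 0x1a4,
// normalizeMode('644', 0) === 0x1a4 (the string is parsed as octal), and any
// other input, e.g. normalizeMode({}, 0x1a4), falls back to the default.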
function normalizePath(p: string): string {
// Node doesn't allow null characters in paths.
if (p.indexOf('\u0000') >= 0) {
throw new ApiError(ErrorCode.EINVAL, 'Path must be a string without null bytes.');
} else if (p === '') {
throw new ApiError(ErrorCode.EINVAL, 'Path must not be empty.');
}
return path.resolve(p);
}
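// For reference (paths are illustrative): normalizePath('a/b/../c') resolves
// against the current working directory to an absolute path such as '/a/c';
// '' and any path containing '\u0000' throw EINVAL instead.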
function normalizeOptions(options: any, defEnc: string, defFlag: string, defMode: number): {encoding: string; flag: string; mode: number} {
switch (typeof options) {
case 'object':
return {
encoding: typeof options['encoding'] !== 'undefined' ? options['encoding'] : defEnc,
flag: typeof options['flag'] !== 'undefined' ? options['flag'] : defFlag,
mode: normalizeMode(options['mode'], defMode)
};
case 'string':
return {
encoding: options,
flag: defFlag,
mode: defMode
};
default:
return {
encoding: defEnc,
flag: defFlag,
mode: defMode
};
}
}
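// For reference (values are illustrative):
//   normalizeOptions('utf8', null, 'r', null)           -> { encoding: 'utf8', flag: 'r', mode: null }
//   normalizeOptions({ flag: 'a' }, 'utf8', 'w', 0x1a4) -> { encoding: 'utf8', flag: 'a', mode: 0x1a4 }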
// The default callback is a NOP.
function nopCb() {}
/**
* The node frontend to all filesystems.
* This layer handles:
*
* * Sanity checking inputs.
* * Normalizing paths.
* * Resetting stack depth for asynchronous operations which may not go through
* the browser by wrapping all input callbacks using `setImmediate`.
* * Performing the requested operation through the filesystem or the file
* descriptor, as appropriate.
* * Handling optional arguments and setting default arguments.
* @see http://nodejs.org/api/fs.html
* @class
*/
export class fs {
private static root: file_system.FileSystem = null;
public static _initialize(rootFS: file_system.FileSystem): file_system.FileSystem {
if (!(<any> rootFS).constructor.isAvailable()) {
throw new ApiError(ErrorCode.EINVAL, 'Tried to instantiate BrowserFS with an unavailable file system.');
}
return fs.root = rootFS;
}
/**
* converts Date or number to a fractional UNIX timestamp
* Grabbed from NodeJS sources (lib/fs.js)
*/
public static _toUnixTimestamp(time: Date);
public static _toUnixTimestamp(time: number);
public static _toUnixTimestamp(time: any): number {
if (typeof time === 'number') {
return time;
} else if (time instanceof Date) {
return time.getTime() / 1000;
}
throw new Error("Cannot parse time: " + time);
}
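  // For reference: fs._toUnixTimestamp(new Date(5000)) === 5, while a numeric
  // argument such as fs._toUnixTimestamp(5) is returned unchanged.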
/**
* **NONSTANDARD**: Grab the FileSystem instance that backs this API.
* @return [BrowserFS.FileSystem | null] Returns null if the file system has
* not been initialized.
*/
public static getRootFS(): file_system.FileSystem {
if (fs.root) {
return fs.root;
} else {
return null;
}
}
// FILE OR DIRECTORY METHODS
/**
* Asynchronous rename. No arguments other than a possible exception are given
* to the completion callback.
* @param [String] oldPath
* @param [String] newPath
* @param [Function(BrowserFS.ApiError)] callback
*/
public static rename(oldPath: string, newPath: string, cb: (err?: api_error.ApiError) => void = nopCb): void {
var newCb = <(err?: api_error.ApiError) => void> wrapCb(cb, 1);
try {
fs.root.rename(normalizePath(oldPath), normalizePath(newPath), newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous rename.
* @param [String] oldPath
* @param [String] newPath
*/
public static renameSync(oldPath: string, newPath: string): void {
fs.root.renameSync(normalizePath(oldPath), normalizePath(newPath));
}
/**
* Test whether or not the given path exists by checking with the file system.
* Then call the callback argument with either true or false.
* @example Sample invocation
* fs.exists('/etc/passwd', function (exists) {
* util.debug(exists ? "it's there" : "no passwd!");
* });
* @param [String] path
* @param [Function(Boolean)] callback
*/
public static exists(path: string, cb: (exists: boolean) => void = nopCb): void {
var newCb = <(exists: boolean) => void> wrapCb(cb, 1);
try {
return fs.root.exists(normalizePath(path), newCb);
} catch (e) {
// Doesn't return an error. If something bad happens, we assume it just
// doesn't exist.
return newCb(false);
}
}
/**
* Test whether or not the given path exists by checking with the file system.
* @param [String] path
* @return [boolean]
*/
public static existsSync(path: string): boolean {
try {
return fs.root.existsSync(normalizePath(path));
} catch (e) {
// Doesn't return an error. If something bad happens, we assume it just
// doesn't exist.
return false;
}
}
/**
* Asynchronous `stat`.
* @param [String] path
* @param [Function(BrowserFS.ApiError, BrowserFS.node.fs.Stats)] callback
*/
public static stat(path: string, cb: (err: api_error.ApiError, stats?: node_fs_stats.Stats) => any = nopCb): void {
var newCb = <(err: api_error.ApiError, stats?: node_fs_stats.Stats) => any> wrapCb(cb, 2);
try {
return fs.root.stat(normalizePath(path), false, newCb);
} catch (e) {
return newCb(e, null);
}
}
/**
* Synchronous `stat`.
* @param [String] path
* @return [BrowserFS.node.fs.Stats]
*/
public static statSync(path: string): node_fs_stats.Stats {
return fs.root.statSync(normalizePath(path), false);
}
/**
* Asynchronous `lstat`.
* `lstat()` is identical to `stat()`, except that if path is a symbolic link,
* then the link itself is stat-ed, not the file that it refers to.
* @param [String] path
* @param [Function(BrowserFS.ApiError, BrowserFS.node.fs.Stats)] callback
*/
public static lstat(path: string, cb: (err: api_error.ApiError, stats?: node_fs_stats.Stats) => any = nopCb): void {
var newCb = <(err: api_error.ApiError, stats?: node_fs_stats.Stats) => any> wrapCb(cb, 2);
try {
return fs.root.stat(normalizePath(path), true, newCb);
} catch (e) {
return newCb(e, null);
}
}
/**
* Synchronous `lstat`.
* `lstat()` is identical to `stat()`, except that if path is a symbolic link,
* then the link itself is stat-ed, not the file that it refers to.
* @param [String] path
* @return [BrowserFS.node.fs.Stats]
*/
public static lstatSync(path: string): node_fs_stats.Stats {
return fs.root.statSync(normalizePath(path), true);
}
// FILE-ONLY METHODS
/**
* Asynchronous `truncate`.
* @param [String] path
* @param [Number] len
* @param [Function(BrowserFS.ApiError)] callback
*/
public static truncate(path: string, cb?: Function): void;
public static truncate(path: string, len: number, cb?: Function): void;
public static truncate(path: string, arg2: any = 0, cb: Function = nopCb): void {
var len = 0;
if (typeof arg2 === 'function') {
cb = arg2;
} else if (typeof arg2 === 'number') {
len = arg2;
}
var newCb = wrapCb(cb, 1);
try {
if (len < 0) {
throw new ApiError(ErrorCode.EINVAL);
}
return fs.root.truncate(normalizePath(path), len, newCb);
} catch (e) {
return newCb(e);
}
}
/**
* Synchronous `truncate`.
* @param [String] path
* @param [Number] len
*/
public static truncateSync(path: string, len: number = 0): void {
if (len < 0) {
throw new ApiError(ErrorCode.EINVAL);
}
return fs.root.truncateSync(normalizePath(path), len);
}
/**
* Asynchronous `unlink`.
* @param [String] path
* @param [Function(BrowserFS.ApiError)] callback
*/
public static unlink(path: string, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
return fs.root.unlink(normalizePath(path), newCb);
} catch (e) {
return newCb(e);
}
}
/**
* Synchronous `unlink`.
* @param [String] path
*/
public static unlinkSync(path: string): void {
return fs.root.unlinkSync(normalizePath(path));
}
/**
* Asynchronous file open.
* Exclusive mode ensures that path is newly created.
*
* `flags` can be:
*
* * `'r'` - Open file for reading. An exception occurs if the file does not exist.
* * `'r+'` - Open file for reading and writing. An exception occurs if the file does not exist.
* * `'rs'` - Open file for reading in synchronous mode. Instructs the filesystem to not cache writes.
* * `'rs+'` - Open file for reading and writing, and opens the file in synchronous mode.
* * `'w'` - Open file for writing. The file is created (if it does not exist) or truncated (if it exists).
* * `'wx'` - Like 'w' but opens the file in exclusive mode.
* * `'w+'` - Open file for reading and writing. The file is created (if it does not exist) or truncated (if it exists).
* * `'wx+'` - Like 'w+' but opens the file in exclusive mode.
* * `'a'` - Open file for appending. The file is created if it does not exist.
* * `'ax'` - Like 'a' but opens the file in exclusive mode.
* * `'a+'` - Open file for reading and appending. The file is created if it does not exist.
* * `'ax+'` - Like 'a+' but opens the file in exclusive mode.
*
* @see http://www.manpagez.com/man/2/open/
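 * @example Usage example (illustrative; the path and flag are made up)
 *   fs.open('/log.txt', 'a', function (err, fd) {
 *     if (err) throw err;
 *     fs.close(fd);
 *   });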
* @param [String] path
* @param [String] flags
* @param [Number?] mode defaults to `0644`
* @param [Function(BrowserFS.ApiError, BrowserFS.File)] callback
*/
public static open(path: string, flag: string, cb?: (err: api_error.ApiError, fd?: file.File) => any): void;
public static open(path: string, flag: string, mode: string, cb?: (err: api_error.ApiError, fd?: file.File) => any): void;
public static open(path: string, flag: string, mode: number, cb?: (err: api_error.ApiError, fd?: file.File) => any): void;
public static open(path: string, flag: string, arg2?: any, cb: (err: api_error.ApiError, fd?: file.File) => any = nopCb): void {
var mode = normalizeMode(arg2, 0x1a4);
cb = typeof arg2 === 'function' ? arg2 : cb;
var newCb = <(err: api_error.ApiError, fd?: file.File) => any> wrapCb(cb, 2);
try {
return fs.root.open(normalizePath(path), FileFlag.getFileFlag(flag), mode, newCb);
} catch (e) {
return newCb(e, null);
}
}
/**
* Synchronous file open.
* @see http://www.manpagez.com/man/2/open/
* @param [String] path
* @param [String] flags
* @param [Number?] mode defaults to `0644`
* @return [BrowserFS.File]
*/
public static openSync(path: string, flag: string, mode?: string): file.File;
public static openSync(path: string, flag: string, mode?: number): file.File;
public static openSync(path: string, flag: string, mode: any = 0x1a4): file.File {
return fs.root.openSync(normalizePath(path), FileFlag.getFileFlag(flag), mode);
}
/**
* Asynchronously reads the entire contents of a file.
* @example Usage example
* fs.readFile('/etc/passwd', function (err, data) {
* if (err) throw err;
* console.log(data);
* });
* @param [String] filename
* @param [Object?] options
* @option options [String] encoding The string encoding for the file contents. Defaults to `null`.
* @option options [String] flag Defaults to `'r'`.
* @param [Function(BrowserFS.ApiError, String | BrowserFS.node.Buffer)] callback If no encoding is specified, then the raw buffer is returned.
*/
public static readFile(filename: string, cb?: (err: api_error.ApiError, data?: any) => void ): void;
public static readFile(filename: string, options: {[opt: string]: any}, cb?: (err: api_error.ApiError, data?: any) => void ): void;
public static readFile(filename: string, encoding: string, cb?: (err: api_error.ApiError, data?: any) => void ): void;
public static readFile(filename: string, arg2: any = {}, cb: (err: api_error.ApiError, data?: any) => void = nopCb ) {
var options = normalizeOptions(arg2, null, 'r', null);
cb = typeof arg2 === 'function' ? arg2 : cb;
var newCb = <(err: api_error.ApiError, data?: any) => void> wrapCb(cb, 2);
try {
var flag = FileFlag.getFileFlag(options['flag']);
if (!flag.isReadable()) {
return newCb(new ApiError(ErrorCode.EINVAL, 'Flag passed to readFile must allow for reading.'));
}
return fs.root.readFile(normalizePath(filename), options.encoding, flag, newCb);
} catch (e) {
return newCb(e, null);
}
}
/**
* Synchronously reads the entire contents of a file.
* @param [String] filename
* @param [Object?] options
* @option options [String] encoding The string encoding for the file contents. Defaults to `null`.
* @option options [String] flag Defaults to `'r'`.
* @return [String | BrowserFS.node.Buffer]
*/
public static readFileSync(filename: string, encoding?: string): NodeBuffer;
public static readFileSync(filename: string, options?: { encoding?: string; flag?: string; }): NodeBuffer;
public static readFileSync(filename: string, arg2: any = {}): NodeBuffer {
var options = normalizeOptions(arg2, null, 'r', null);
var flag = FileFlag.getFileFlag(options.flag);
if (!flag.isReadable()) {
throw new ApiError(ErrorCode.EINVAL, 'Flag passed to readFile must allow for reading.');
}
return fs.root.readFileSync(normalizePath(filename), options.encoding, flag);
}
/**
* Asynchronously writes data to a file, replacing the file if it already
* exists.
*
* The encoding option is ignored if data is a buffer.
*
* @example Usage example
* fs.writeFile('message.txt', 'Hello Node', function (err) {
* if (err) throw err;
* console.log('It\'s saved!');
* });
* @param [String] filename
* @param [String | BrowserFS.node.Buffer] data
* @param [Object?] options
* @option options [String] encoding Defaults to `'utf8'`.
* @option options [Number] mode Defaults to `0644`.
* @option options [String] flag Defaults to `'w'`.
* @param [Function(BrowserFS.ApiError)] callback
*/
public static writeFile(filename: string, data: any, cb?: (err?: api_error.ApiError) => void);
public static writeFile(filename: string, data: any, encoding?: string, cb?: (err?: api_error.ApiError) => void);
public static writeFile(filename: string, data: any, options?: Object, cb?: (err?: api_error.ApiError) => void);
public static writeFile(filename: string, data: any, arg3: any = {}, cb: (err?: api_error.ApiError) => void = nopCb): void {
var options = normalizeOptions(arg3, 'utf8', 'w', 0x1a4);
cb = typeof arg3 === 'function' ? arg3 : cb;
var newCb = <(err?: api_error.ApiError) => void> wrapCb(cb, 1);
try {
var flag = FileFlag.getFileFlag(options.flag);
if (!flag.isWriteable()) {
return newCb(new ApiError(ErrorCode.EINVAL, 'Flag passed to writeFile must allow for writing.'));
}
return fs.root.writeFile(normalizePath(filename), data, options.encoding, flag, options.mode, newCb);
} catch (e) {
return newCb(e);
}
}
/**
* Synchronously writes data to a file, replacing the file if it already
* exists.
*
* The encoding option is ignored if data is a buffer.
* @param [String] filename
* @param [String | BrowserFS.node.Buffer] data
* @param [Object?] options
* @option options [String] encoding Defaults to `'utf8'`.
* @option options [Number] mode Defaults to `0644`.
* @option options [String] flag Defaults to `'w'`.
*/
public static writeFileSync(filename: string, data: any, options?: Object): void;
public static writeFileSync(filename: string, data: any, encoding?: string): void;
public static writeFileSync(filename: string, data: any, arg3?: any): void {
var options = normalizeOptions(arg3, 'utf8', 'w', 0x1a4);
var flag = FileFlag.getFileFlag(options.flag);
if (!flag.isWriteable()) {
throw new ApiError(ErrorCode.EINVAL, 'Flag passed to writeFile must allow for writing.');
}
return fs.root.writeFileSync(normalizePath(filename), data, options.encoding, flag, options.mode);
}
/**
 * Asynchronously append data to a file, creating the file if it does not
 * yet exist.
*
* @example Usage example
* fs.appendFile('message.txt', 'data to append', function (err) {
* if (err) throw err;
* console.log('The "data to append" was appended to file!');
* });
* @param [String] filename
* @param [String | BrowserFS.node.Buffer] data
* @param [Object?] options
* @option options [String] encoding Defaults to `'utf8'`.
* @option options [Number] mode Defaults to `0644`.
* @option options [String] flag Defaults to `'a'`.
* @param [Function(BrowserFS.ApiError)] callback
*/
public static appendFile(filename: string, data: any, cb?: (err: api_error.ApiError) => void): void;
public static appendFile(filename: string, data: any, options?: Object, cb?: (err: api_error.ApiError) => void): void;
public static appendFile(filename: string, data: any, encoding?: string, cb?: (err: api_error.ApiError) => void): void;
public static appendFile(filename: string, data: any, arg3?: any, cb: (err: api_error.ApiError) => void = nopCb): void {
var options = normalizeOptions(arg3, 'utf8', 'a', 0x1a4);
cb = typeof arg3 === 'function' ? arg3 : cb;
var newCb = <(err: api_error.ApiError) => void> wrapCb(cb, 1);
try {
var flag = FileFlag.getFileFlag(options.flag);
if (!flag.isAppendable()) {
return newCb(new ApiError(ErrorCode.EINVAL, 'Flag passed to appendFile must allow for appending.'));
}
fs.root.appendFile(normalizePath(filename), data, options.encoding, flag, options.mode, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronously append data to a file, creating the file if it does not yet
* exist.
*
* @example Usage example
* fs.appendFileSync('message.txt', 'data to append');
* @param [String] filename
* @param [String | BrowserFS.node.Buffer] data
* @param [Object?] options
* @option options [String] encoding Defaults to `'utf8'`.
* @option options [Number] mode Defaults to `0644`.
* @option options [String] flag Defaults to `'a'`.
*/
public static appendFileSync(filename: string, data: any, options?: Object): void;
public static appendFileSync(filename: string, data: any, encoding?: string): void;
public static appendFileSync(filename: string, data: any, arg3?: any): void {
var options = normalizeOptions(arg3, 'utf8', 'a', 0x1a4);
var flag = FileFlag.getFileFlag(options.flag);
if (!flag.isAppendable()) {
throw new ApiError(ErrorCode.EINVAL, 'Flag passed to appendFile must allow for appending.');
}
return fs.root.appendFileSync(normalizePath(filename), data, options.encoding, flag, options.mode);
}
// FILE DESCRIPTOR METHODS
/**
* Asynchronous `fstat`.
* `fstat()` is identical to `stat()`, except that the file to be stat-ed is
* specified by the file descriptor `fd`.
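* @example Usage sketch (assumes `fd` was obtained via `fs.open`)
* fs.fstat(fd, function (err, stats) {
*   if (err) throw err;
*   console.log(stats);
* });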
* @param [BrowserFS.File] fd
* @param [Function(BrowserFS.ApiError, BrowserFS.node.fs.Stats)] callback
*/
public static fstat(fd: file.File, cb: (err: api_error.ApiError, stats?: node_fs_stats.Stats) => any = nopCb): void {
var newCb = <(err: api_error.ApiError, stats?: node_fs_stats.Stats) => any> wrapCb(cb, 2);
try {
checkFd(fd);
fd.stat(newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `fstat`.
* `fstat()` is identical to `stat()`, except that the file to be stat-ed is
* specified by the file descriptor `fd`.
* @param [BrowserFS.File] fd
* @return [BrowserFS.node.fs.Stats]
*/
public static fstatSync(fd: file.File): node_fs_stats.Stats {
checkFd(fd);
return fd.statSync();
}
/**
* Asynchronous close.
* @param [BrowserFS.File] fd
* @param [Function(BrowserFS.ApiError)] callback
*/
public static close(fd: file.File, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
checkFd(fd);
fd.close(newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous close.
* @param [BrowserFS.File] fd
*/
public static closeSync(fd: file.File): void {
checkFd(fd);
return fd.closeSync();
}
/**
* Asynchronous ftruncate.
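* @example Usage sketch (truncates to 100 bytes; `fd` from `fs.open`)
* fs.ftruncate(fd, 100, function (err) {
*   if (err) throw err;
* });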
* @param [BrowserFS.File] fd
* @param [Number] len
* @param [Function(BrowserFS.ApiError)] callback
*/
public static ftruncate(fd: file.File, cb?:Function);
public static ftruncate(fd: file.File, len?: number, cb?:Function);
public static ftruncate(fd: file.File, arg2?: any, cb:Function = nopCb) {
var length = typeof arg2 === 'number' ? arg2 : 0;
cb = typeof arg2 === 'function' ? arg2 : cb;
var newCb = wrapCb(cb, 1);
try {
checkFd(fd);
if (length < 0) {
throw new ApiError(ErrorCode.EINVAL);
}
fd.truncate(length, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous ftruncate.
* @param [BrowserFS.File] fd
* @param [Number] len
*/
public static ftruncateSync(fd: file.File, len: number = 0) {
checkFd(fd);
return fd.truncateSync(len);
}
/**
* Asynchronous fsync.
* @param [BrowserFS.File] fd
* @param [Function(BrowserFS.ApiError)] callback
*/
public static fsync(fd: file.File, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
checkFd(fd);
fd.sync(newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous fsync.
* @param [BrowserFS.File] fd
*/
public static fsyncSync(fd: file.File): void {
checkFd(fd);
return fd.syncSync();
}
/**
* Asynchronous fdatasync. | * @param [Function(BrowserFS.ApiError)] callback
*/
public static fdatasync(fd: file.File, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
checkFd(fd);
fd.datasync(newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous fdatasync.
* @param [BrowserFS.File] fd
*/
public static fdatasyncSync(fd: file.File): void {
checkFd(fd);
fd.datasyncSync();
}
/**
* Write buffer to the file specified by `fd`.
* Note that it is unsafe to use fs.write multiple times on the same file
* without waiting for the callback.
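* @example Usage sketch (assumes `fd` is open for writing; contents are
* illustrative)
* var buf = new Buffer('hello world', 'utf8');
* fs.write(fd, buf, 0, buf.length, null, function (err, written, buffer) {
*   if (err) throw err;
*   console.log(written + ' bytes written');
* });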
* @param [BrowserFS.File] fd
* @param [BrowserFS.node.Buffer] buffer Buffer containing the data to write to
* the file.
* @param [Number] offset Offset in the buffer to start reading data from.
* @param [Number] length The number of bytes to write to the file.
* @param [Number] position Offset from the beginning of the file where this
* data should be written. If position is null, the data will be written at
* the current position.
* @param [Function(BrowserFS.ApiError, Number, BrowserFS.node.Buffer)]
* callback The number is the number of bytes written into the file.
*/
public static write(fd: file.File, buffer: NodeBuffer, offset: number, length: number, cb?: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any): void;
public static write(fd: file.File, buffer: NodeBuffer, offset: number, length: number, position?: number, cb?: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any): void;
public static write(fd: file.File, data: string, cb?: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any): void;
public static write(fd: file.File, data: string, position: number, cb?: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any): void;
public static write(fd: file.File, data: string, position: number, encoding: string, cb?: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any): void;
public static write(fd: file.File, arg2: any, arg3?: any, arg4?: any, arg5?: any, cb: (err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any = nopCb): void {
var buffer: NodeBuffer, offset: number, length: number, position: number = null;
if (typeof arg2 === 'string') {
// Signature 1: (fd, string, [position?, [encoding?]], cb?)
var encoding = 'utf8';
switch (typeof arg3) {
case 'function':
// (fd, string, cb)
cb = arg3;
break;
case 'number':
// (fd, string, position, encoding?, cb?)
position = arg3;
encoding = typeof arg4 === 'string' ? arg4 : 'utf8';
cb = typeof arg5 === 'function' ? arg5 : cb;
break;
default:
// ...try to find the callback and get out of here!
cb = typeof arg4 === 'function' ? arg4 : typeof arg5 === 'function' ? arg5 : cb;
return cb(new ApiError(ErrorCode.EINVAL, 'Invalid arguments.'));
}
buffer = new Buffer(arg2, encoding);
offset = 0;
length = buffer.length;
} else {
// Signature 2: (fd, buffer, offset, length, position?, cb?)
buffer = arg2;
offset = arg3;
length = arg4;
position = typeof arg5 === 'number' ? arg5 : null;
cb = typeof arg5 === 'function' ? arg5 : cb;
}
var newCb = <(err: api_error.ApiError, written?: number, buffer?: NodeBuffer) => any> wrapCb(cb, 3);
try {
checkFd(fd);
if (position == null) {
position = fd.getPos();
}
fd.write(buffer, offset, length, position, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Write buffer to the file specified by `fd`.
* Note that it is unsafe to use fs.write multiple times on the same file
* without waiting for it to return.
* @param [BrowserFS.File] fd
* @param [BrowserFS.node.Buffer] buffer Buffer containing the data to write to
* the file.
* @param [Number] offset Offset in the buffer to start reading data from.
* @param [Number] length The number of bytes to write to the file.
* @param [Number] position Offset from the beginning of the file where this
* data should be written. If position is null, the data will be written at
* the current position.
* @return [Number]
*/
public static writeSync(fd: file.File, buffer: NodeBuffer, offset: number, length: number, position?: number): void;
public static writeSync(fd: file.File, data: string, position?: number, encoding?: string): void;
public static writeSync(fd: file.File, arg2: any, arg3?: any, arg4?: any, arg5?: any): number {
var buffer: NodeBuffer, offset: number = 0, length: number, position: number;
if (typeof arg2 === 'string') {
// Signature 1: (fd, string, [position?, [encoding?]])
position = typeof arg3 === 'number' ? arg3 : null;
var encoding = typeof arg4 === 'string' ? arg4 : 'utf8';
offset = 0;
buffer = new Buffer(arg2, encoding);
length = buffer.length;
} else {
// Signature 2: (fd, buffer, offset, length, position?)
buffer = arg2;
offset = arg3;
length = arg4;
position = typeof arg5 === 'number' ? arg5 : null;
}
checkFd(fd);
if (position == null) {
position = fd.getPos();
}
return fd.writeSync(buffer, offset, length, position);
}
/**
* Read data from the file specified by `fd`.
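* @example Usage sketch (assumes `fd` is open for reading; sizes are
* illustrative)
* var buf = new Buffer(1024);
* fs.read(fd, buf, 0, buf.length, null, function (err, bytesRead, buffer) {
*   if (err) throw err;
*   console.log('read ' + bytesRead + ' bytes');
* });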
* @param [BrowserFS.File] fd
* @param [BrowserFS.node.Buffer] buffer The buffer that the data will be
* written to.
* @param [Number] offset The offset within the buffer where writing will
* start.
* @param [Number] length An integer specifying the number of bytes to read.
* @param [Number] position An integer specifying where to begin reading from
* in the file. If position is null, data will be read from the current file
* position.
* @param [Function(BrowserFS.ApiError, Number, BrowserFS.node.Buffer)]
* callback The number is the number of bytes read
*/
public static read(fd: file.File, length: number, position: number, encoding: string, cb?: (err: api_error.ApiError, data?: string, bytesRead?: number) => void): void;
public static read(fd: file.File, buffer: NodeBuffer, offset: number, length: number, position: number, cb?: (err: api_error.ApiError, bytesRead?: number, buffer?: NodeBuffer) => void): void;
public static read(fd: file.File, arg2: any, arg3: any, arg4: any, arg5?: any, cb: (err: api_error.ApiError, arg2?: any, arg3?: any) => void = nopCb): void {
var position: number, offset: number, length: number, buffer: NodeBuffer, newCb: (err: api_error.ApiError, bytesRead?: number, buffer?: NodeBuffer) => void;
if (typeof arg2 === 'number') {
// legacy interface
// (fd, length, position, encoding, callback)
length = arg2;
position = arg3;
var encoding = arg4;
cb = typeof arg5 === 'function' ? arg5 : cb;
offset = 0;
buffer = new Buffer(length);
// XXX: Inefficient.
// Wrap the cb so we shelter upper layers of the API from these
// shenanigans.
newCb = <(err: api_error.ApiError, bytesRead?: number, buffer?: NodeBuffer) => void> wrapCb((function(err, bytesRead, buf) {
if (err) {
return cb(err);
}
cb(err, buf.toString(encoding), bytesRead);
}), 3);
} else {
buffer = arg2;
offset = arg3;
length = arg4;
position = arg5;
newCb = <(err: api_error.ApiError, bytesRead?: number, buffer?: NodeBuffer) => void> wrapCb(cb, 3);
}
try {
checkFd(fd);
if (position == null) {
position = fd.getPos();
}
fd.read(buffer, offset, length, position, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Read data from the file specified by `fd`.
* @param [BrowserFS.File] fd
* @param [BrowserFS.node.Buffer] buffer The buffer that the data will be
* written to.
* @param [Number] offset The offset within the buffer where writing will
* start.
* @param [Number] length An integer specifying the number of bytes to read.
* @param [Number] position An integer specifying where to begin reading from
* in the file. If position is null, data will be read from the current file
* position.
* @return [Number]
*/
public static readSync(fd: file.File, length: number, position: number, encoding: string): string;
public static readSync(fd: file.File, buffer: NodeBuffer, offset: number, length: number, position: number): number;
public static readSync(fd: file.File, arg2: any, arg3: any, arg4: any, arg5?: any): any {
var shenanigans = false;
var buffer: NodeBuffer, offset: number, length: number, position: number;
if (typeof arg2 === 'number') {
length = arg2;
position = arg3;
var encoding = arg4;
offset = 0;
buffer = new Buffer(length);
shenanigans = true;
} else {
buffer = arg2;
offset = arg3;
length = arg4;
position = arg5;
}
checkFd(fd);
if (position == null) {
position = fd.getPos();
}
var rv = fd.readSync(buffer, offset, length, position);
if (!shenanigans) {
return rv;
} else {
return [buffer.toString(encoding), rv];
}
}
/**
* Asynchronous `fchown`.
* @param [BrowserFS.File] fd
* @param [Number] uid
* @param [Number] gid
* @param [Function(BrowserFS.ApiError)] callback
*/
public static fchown(fd: file.File, uid: number, gid: number, callback: Function = nopCb): void {
var newCb = wrapCb(callback, 1);
try {
checkFd(fd);
fd.chown(uid, gid, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `fchown`.
* @param [BrowserFS.File] fd
* @param [Number] uid
* @param [Number] gid
*/
public static fchownSync(fd: file.File, uid: number, gid: number): void {
checkFd(fd);
return fd.chownSync(uid, gid);
}
/**
* Asynchronous `fchmod`.
* @param [BrowserFS.File] fd
* @param [Number] mode
* @param [Function(BrowserFS.ApiError)] callback
*/
public static fchmod(fd: file.File, mode: string, cb?: Function): void;
public static fchmod(fd: file.File, mode: number, cb?: Function): void;
public static fchmod(fd: file.File, mode: any, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
checkFd(fd);
fd.chmod(mode, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `fchmod`.
* @param [BrowserFS.File] fd
* @param [Number] mode
*/
public static fchmodSync(fd: file.File, mode: string): void;
public static fchmodSync(fd: file.File, mode: number): void;
public static fchmodSync(fd: file.File, mode: any): void {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
checkFd(fd);
return fd.chmodSync(mode);
}
/**
* Change the file timestamps of a file referenced by the supplied file
* descriptor.
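* @example Usage sketch (sets both timestamps to now; `fd` from `fs.open`)
* fs.futimes(fd, new Date(), new Date(), function (err) {
*   if (err) throw err;
* });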
* @param [BrowserFS.File] fd
* @param [Date] atime
* @param [Date] mtime
* @param [Function(BrowserFS.ApiError)] callback
*/
public static futimes(fd: file.File, atime: number, mtime: number, cb: Function): void;
public static futimes(fd: file.File, atime: Date, mtime: Date, cb: Function): void;
public static futimes(fd: file.File, atime: any, mtime: any, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
checkFd(fd);
if (typeof atime === 'number') {
atime = new Date(atime * 1000);
}
if (typeof mtime === 'number') {
mtime = new Date(mtime * 1000);
}
fd.utimes(atime, mtime, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Change the file timestamps of a file referenced by the supplied file
* descriptor.
* @param [BrowserFS.File] fd
* @param [Date] atime
* @param [Date] mtime
*/
public static futimesSync(fd: file.File, atime: number, mtime: number): void;
public static futimesSync(fd: file.File, atime: Date, mtime: Date): void;
public static futimesSync(fd: file.File, atime: any, mtime: any): void {
checkFd(fd);
if (typeof atime === 'number') {
atime = new Date(atime * 1000);
}
if (typeof mtime === 'number') {
mtime = new Date(mtime * 1000);
}
return fd.utimesSync(atime, mtime);
}
// DIRECTORY-ONLY METHODS
/**
* Asynchronous `rmdir`.
* @param [String] path
* @param [Function(BrowserFS.ApiError)] callback
*/
public static rmdir(path: string, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
path = normalizePath(path);
fs.root.rmdir(path, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `rmdir`.
* @param [String] path
*/
public static rmdirSync(path: string): void {
path = normalizePath(path);
return fs.root.rmdirSync(path);
}
/**
* Asynchronous `mkdir`.
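* @example Usage sketch (0x1ff === 0777; path is illustrative)
* fs.mkdir('/data', 0x1ff, function (err) {
*   if (err) throw err;
* });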
* @param [String] path
* @param [Number?] mode defaults to `0777`
* @param [Function(BrowserFS.ApiError)] callback
*/
public static mkdir(path: string, mode?: any, cb: Function = nopCb): void {
if (typeof mode === 'function') {
cb = mode;
mode = 0x1ff;
}
var newCb = wrapCb(cb, 1);
try {
path = normalizePath(path);
fs.root.mkdir(path, mode, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `mkdir`.
* @param [String] path
* @param [Number?] mode defaults to `0777`
*/
public static mkdirSync(path: string, mode?: string): void;
public static mkdirSync(path: string, mode?: number): void;
public static mkdirSync(path: string, mode: any = 0x1ff): void {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
path = normalizePath(path);
return fs.root.mkdirSync(path, mode);
}
/**
* Asynchronous `readdir`. Reads the contents of a directory.
* The callback gets two arguments `(err, files)` where `files` is an array of
* the names of the files in the directory excluding `'.'` and `'..'`.
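* @example Usage sketch (path is illustrative)
* fs.readdir('/tmp', function (err, files) {
*   if (err) throw err;
*   console.log(files);
* });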
* @param [String] path
* @param [Function(BrowserFS.ApiError, String[])] callback
*/
public static readdir(path: string, cb: (err: api_error.ApiError, files?: string[]) => void = nopCb): void {
var newCb = <(err: api_error.ApiError, files?: string[]) => void> wrapCb(cb, 2);
try {
path = normalizePath(path);
fs.root.readdir(path, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `readdir`. Reads the contents of a directory.
* @param [String] path
* @return [String[]]
*/
public static readdirSync(path: string): string[] {
path = normalizePath(path);
return fs.root.readdirSync(path);
}
// SYMLINK METHODS
/**
* Asynchronous `link`.
* @param [String] srcpath
* @param [String] dstpath
* @param [Function(BrowserFS.ApiError)] callback
*/
public static link(srcpath: string, dstpath: string, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
srcpath = normalizePath(srcpath);
dstpath = normalizePath(dstpath);
fs.root.link(srcpath, dstpath, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `link`.
* @param [String] srcpath
* @param [String] dstpath
*/
public static linkSync(srcpath: string, dstpath: string): void {
srcpath = normalizePath(srcpath);
dstpath = normalizePath(dstpath);
return fs.root.linkSync(srcpath, dstpath);
}
/**
* Asynchronous `symlink`.
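* @example Usage sketch (paths are illustrative)
* fs.symlink('/data/target.txt', '/data/link.txt', 'file', function (err) {
*   if (err) throw err;
* });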
* @param [String] srcpath
* @param [String] dstpath
* @param [String?] type can be either `'dir'` or `'file'` (default is `'file'`)
* @param [Function(BrowserFS.ApiError)] callback
*/
public static symlink(srcpath: string, dstpath: string, cb?: Function): void;
public static symlink(srcpath: string, dstpath: string, type?: string, cb?: Function): void;
public static symlink(srcpath: string, dstpath: string, arg3?: any, cb: Function = nopCb): void {
var type = typeof arg3 === 'string' ? arg3 : 'file';
cb = typeof arg3 === 'function' ? arg3 : cb;
var newCb = wrapCb(cb, 1);
try {
if (type !== 'file' && type !== 'dir') {
return newCb(new ApiError(ErrorCode.EINVAL, "Invalid type: " + type));
}
srcpath = normalizePath(srcpath);
dstpath = normalizePath(dstpath);
fs.root.symlink(srcpath, dstpath, type, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `symlink`.
* @param [String] srcpath
* @param [String] dstpath
* @param [String?] type can be either `'dir'` or `'file'` (default is `'file'`)
*/
public static symlinkSync(srcpath: string, dstpath: string, type?: string): void {
if (type == null) {
type = 'file';
} else if (type !== 'file' && type !== 'dir') {
throw new ApiError(ErrorCode.EINVAL, "Invalid type: " + type);
}
srcpath = normalizePath(srcpath);
dstpath = normalizePath(dstpath);
return fs.root.symlinkSync(srcpath, dstpath, type);
}
/**
* Asynchronous readlink.
* @param [String] path
* @param [Function(BrowserFS.ApiError, String)] callback
*/
public static readlink(path: string, cb: (err: api_error.ApiError, linkString: string) => any = nopCb): void {
var newCb = wrapCb(cb, 2);
try {
path = normalizePath(path);
fs.root.readlink(path, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous readlink.
* @param [String] path
* @return [String]
*/
public static readlinkSync(path: string): string {
path = normalizePath(path);
return fs.root.readlinkSync(path);
}
// PROPERTY OPERATIONS
/**
* Asynchronous `chown`.
* @param [String] path
* @param [Number] uid
* @param [Number] gid
* @param [Function(BrowserFS.ApiError)] callback
*/
public static chown(path: string, uid: number, gid: number, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
path = normalizePath(path);
fs.root.chown(path, false, uid, gid, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `chown`.
* @param [String] path
* @param [Number] uid
* @param [Number] gid
*/
public static chownSync(path: string, uid: number, gid: number): void {
path = normalizePath(path);
fs.root.chownSync(path, false, uid, gid);
}
/**
* Asynchronous `lchown`.
* @param [String] path
* @param [Number] uid
* @param [Number] gid
* @param [Function(BrowserFS.ApiError)] callback
*/
public static lchown(path: string, uid: number, gid: number, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
path = normalizePath(path);
fs.root.chown(path, true, uid, gid, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `lchown`.
* @param [String] path
* @param [Number] uid
* @param [Number] gid
*/
public static lchownSync(path: string, uid: number, gid: number): void {
path = normalizePath(path);
return fs.root.chownSync(path, true, uid, gid);
}
/**
* Asynchronous `chmod`.
* @param [String] path
* @param [Number] mode
* @param [Function(BrowserFS.ApiError)] callback
*/
public static chmod(path: string, mode: string, cb?: Function): void;
public static chmod(path: string, mode: number, cb?: Function): void;
public static chmod(path: string, mode: any, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
path = normalizePath(path);
fs.root.chmod(path, false, mode, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `chmod`.
* @param [String] path
* @param [Number] mode
*/
public static chmodSync(path: string, mode: string): void;
public static chmodSync(path: string, mode: number): void;
public static chmodSync(path: string, mode: any): void {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
path = normalizePath(path);
return fs.root.chmodSync(path, false, mode);
}
/**
* Asynchronous `lchmod`.
* @param [String] path
* @param [Number] mode
* @param [Function(BrowserFS.ApiError)] callback
*/
public static lchmod(path: string, mode: string, cb?: Function): void;
public static lchmod(path: string, mode: number, cb?: Function): void;
public static lchmod(path: string, mode: any, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
path = normalizePath(path);
fs.root.chmod(path, true, mode, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `lchmod`.
* @param [String] path
* @param [Number] mode
*/
public static lchmodSync(path: string, mode: number): void;
public static lchmodSync(path: string, mode: string): void;
public static lchmodSync(path: string, mode: any): void {
path = normalizePath(path);
mode = typeof mode === 'string' ? parseInt(mode, 8) : mode;
return fs.root.chmodSync(path, true, mode);
}
/**
* Change file timestamps of the file referenced by the supplied path.
* @param [String] path
* @param [Date] atime
* @param [Date] mtime
* @param [Function(BrowserFS.ApiError)] callback
*/
public static utimes(path: string, atime: number, mtime: number, cb: Function): void;
public static utimes(path: string, atime: Date, mtime: Date, cb: Function): void;
public static utimes(path: string, atime: any, mtime: any, cb: Function = nopCb): void {
var newCb = wrapCb(cb, 1);
try {
path = normalizePath(path);
if (typeof atime === 'number') {
atime = new Date(atime * 1000);
}
if (typeof mtime === 'number') {
mtime = new Date(mtime * 1000);
}
fs.root.utimes(path, atime, mtime, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Change file timestamps of the file referenced by the supplied path.
* @param [String] path
* @param [Date] atime
* @param [Date] mtime
*/
public static utimesSync(path: string, atime: number, mtime: number): void;
public static utimesSync(path: string, atime: Date, mtime: Date): void;
public static utimesSync(path: string, atime: any, mtime: any): void {
path = normalizePath(path);
if (typeof atime === 'number') {
atime = new Date(atime * 1000);
}
if (typeof mtime === 'number') {
mtime = new Date(mtime * 1000);
}
return fs.root.utimesSync(path, atime, mtime);
}
/**
* Asynchronous `realpath`. The callback gets two arguments
* `(err, resolvedPath)`. May use `process.cwd` to resolve relative paths.
*
* @example Usage example
* var cache = {'/etc':'/private/etc'};
* fs.realpath('/etc/passwd', cache, function (err, resolvedPath) {
* if (err) throw err;
* console.log(resolvedPath);
* });
*
* @param [String] path
* @param [Object?] cache An object literal of mapped paths that can be used to
* force a specific path resolution or avoid additional `fs.stat` calls for
* known real paths.
* @param [Function(BrowserFS.ApiError, String)] callback
*/
public static realpath(path: string, cb?: (err: api_error.ApiError, resolvedPath?: string) =>any): void;
public static realpath(path: string, cache: {[path: string]: string}, cb: (err: api_error.ApiError, resolvedPath?: string) =>any): void;
public static realpath(path: string, arg2?: any, cb: (err: api_error.ApiError, resolvedPath?: string) =>any = nopCb): void {
var cache = typeof arg2 === 'object' ? arg2 : {};
cb = typeof arg2 === 'function' ? arg2 : cb;
var newCb = <(err: api_error.ApiError, resolvedPath?: string) =>any> wrapCb(cb, 2);
try {
path = normalizePath(path);
fs.root.realpath(path, cache, newCb);
} catch (e) {
newCb(e);
}
}
/**
* Synchronous `realpath`.
* @param [String] path
* @param [Object?] cache An object literal of mapped paths that can be used to
* force a specific path resolution or avoid additional `fs.stat` calls for
* known real paths.
* @return [String]
*/
public static realpathSync(path: string, cache: {[path: string]: string} = {}): string {
path = normalizePath(path);
return fs.root.realpathSync(path, cache);
}
} | * @param [BrowserFS.File] fd |
index.js | import React, { useRef, useState, useEffect } from 'react';
import { useDispatch } from "react-redux";
import { sendToServer } from "../../features/post/postSlice";
export default function CreatePost() {
const dispatch = useDispatch();
const title = useRef();
const text = useRef();
const button = useRef();
const [isSended, setSended] = useState("no");
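// Renders the submit button for the current send state:
// "no" = ready to post, "sending" = request in flight, "yes" = posted.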
const Button = () => {
if (isSended === "yes") {
return (
<button
className="button buttomSendOk"
type="button"
ref={button}>
Posted
</button>
);
}
else if (isSended === "no") {
return (
<button
className="button"
type="button"
ref={button}
onClick={sendPost}>
Post
</button>
);
} else {
return (
<button
className="button"
type="button"
ref={button}
onClick={sendPost}>
...
</button>
); |
const textChange = () => {
setSended("no");
if (title.current.value === "" || text.current.value === "") {
button.current.classList.remove("buttomReady");
}
else {
button.current.classList.add("buttomReady");
}
};
const sendPost = () => {
setSended("sending");
let post = {
title: title.current.value,
content: text.current.value,
};
dispatch(sendToServer(post));
setSended("yes");
};
return (
<div className="conteiner">
<div className="create-post">
<input onChange={textChange} ref={title} placeholder="Give a title to your idea" type="text"></input>
<textarea rows={15} onChange={textChange} ref={text} placeholder="What do you want to say?"></textarea>
<Button />
</div>
</div>
)
} |
}
}; |
cardMysqlViewModel.ts | import { ICardViewModel } from '@/data/models/cardViewModel'
import { RowDataPacket } from 'mysql2'
export interface ICardMysqlViewModel extends ICardViewModel, RowDataPacket {
cod: number
front: string
back: string
interval_time: number | } |
|
0006_alter_blogpost_image.py | # Generated by Django 4.0.3 on 2022-03-06 12:36
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('BlogApp', '0005_alter_blogpost_image'),
]
operations = [
migrations.AlterField(
model_name='blogpost',
name='image',
field=models.ImageField(upload_to=''),
),
] |
|
azure_privatecluster.go | // +build e2e
/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2019-06-01/network"
"github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2019-05-01/resources"
"github.com/Azure/go-autorest/autorest/azure/auth"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/pkg/errors"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/utils/pointer"
"sigs.k8s.io/cluster-api-provider-azure/api/v1alpha4"
clusterv1 "sigs.k8s.io/cluster-api/api/v1alpha4"
capi_e2e "sigs.k8s.io/cluster-api/test/e2e"
"sigs.k8s.io/cluster-api/test/framework"
"sigs.k8s.io/cluster-api/test/framework/clusterctl"
"sigs.k8s.io/cluster-api/util"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// AzurePrivateClusterSpecInput is the input for AzurePrivateClusterSpec.
type AzurePrivateClusterSpecInput struct {
BootstrapClusterProxy framework.ClusterProxy
Namespace *corev1.Namespace
ClusterName string
ClusterctlConfigPath string
E2EConfig *clusterctl.E2EConfig
ArtifactFolder string
}
// AzurePrivateClusterSpec implements a test that creates a workload cluster with a private API endpoint.
func AzurePrivateClusterSpec(ctx context.Context, inputGetter func() AzurePrivateClusterSpecInput) |
// SetupExistingVNet creates a resource group and a VNet to be used by a workload cluster.
func SetupExistingVNet(ctx context.Context, vnetCidr string, cpSubnetCidrs, nodeSubnetCidrs map[string]string) {
By("creating Azure clients with the workload cluster's subscription")
settings, err := auth.GetSettingsFromEnvironment()
Expect(err).NotTo(HaveOccurred())
subscriptionID := settings.GetSubscriptionID()
authorizer, err := settings.GetAuthorizer()
Expect(err).NotTo(HaveOccurred())
groupClient := resources.NewGroupsClient(subscriptionID)
groupClient.Authorizer = authorizer
vnetClient := network.NewVirtualNetworksClient(subscriptionID)
vnetClient.Authorizer = authorizer
nsgClient := network.NewSecurityGroupsClient(subscriptionID)
nsgClient.Authorizer = authorizer
By("creating a resource group")
groupName := os.Getenv(AzureResourceGroup)
_, err = groupClient.CreateOrUpdate(ctx, groupName, resources.Group{
Location: pointer.StringPtr(os.Getenv(AzureLocation)),
Tags: map[string]*string{
"jobName": pointer.StringPtr(os.Getenv(JobName)),
"creationTimestamp": pointer.StringPtr(os.Getenv(Timestamp)),
},
})
Expect(err).To(BeNil())
By("creating a network security group")
nsgName := "control-plane-nsg"
securityRules := []network.SecurityRule{
{
Name: pointer.StringPtr("allow_ssh"),
SecurityRulePropertiesFormat: &network.SecurityRulePropertiesFormat{
Description: pointer.StringPtr("Allow SSH"),
Priority: pointer.Int32Ptr(2200),
Protocol: network.SecurityRuleProtocolTCP,
Access: network.SecurityRuleAccessAllow,
Direction: network.SecurityRuleDirectionInbound,
SourceAddressPrefix: pointer.StringPtr("*"),
SourcePortRange: pointer.StringPtr("*"),
DestinationAddressPrefix: pointer.StringPtr("*"),
DestinationPortRange: pointer.StringPtr("22"),
},
},
{
Name: pointer.StringPtr("allow_apiserver"),
SecurityRulePropertiesFormat: &network.SecurityRulePropertiesFormat{
Description: pointer.StringPtr("Allow API Server"),
SourcePortRange: pointer.StringPtr("*"),
DestinationPortRange: pointer.StringPtr("6443"),
SourceAddressPrefix: pointer.StringPtr("*"),
DestinationAddressPrefix: pointer.StringPtr("*"),
Protocol: network.SecurityRuleProtocolTCP,
Access: network.SecurityRuleAccessAllow,
Direction: network.SecurityRuleDirectionInbound,
Priority: pointer.Int32Ptr(2201),
},
},
}
nsgFuture, err := nsgClient.CreateOrUpdate(ctx, groupName, nsgName, network.SecurityGroup{
Location: pointer.StringPtr(os.Getenv(AzureLocation)),
SecurityGroupPropertiesFormat: &network.SecurityGroupPropertiesFormat{
SecurityRules: &securityRules,
},
})
Expect(err).To(BeNil())
err = nsgFuture.WaitForCompletionRef(ctx, nsgClient.Client)
Expect(err).To(BeNil())
By("creating a virtual network")
var subnets []network.Subnet
for name, cidr := range cpSubnetCidrs {
subnets = append(subnets, network.Subnet{
SubnetPropertiesFormat: &network.SubnetPropertiesFormat{
AddressPrefix: pointer.StringPtr(cidr),
NetworkSecurityGroup: &network.SecurityGroup{
ID: pointer.StringPtr(fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/networkSecurityGroups/%s", subscriptionID, groupName, nsgName)),
},
},
Name: pointer.StringPtr(name),
})
}
for name, cidr := range nodeSubnetCidrs {
subnets = append(subnets, network.Subnet{
SubnetPropertiesFormat: &network.SubnetPropertiesFormat{
AddressPrefix: pointer.StringPtr(cidr),
},
Name: pointer.StringPtr(name),
})
}
// Create the AzureBastion subnet.
subnets = append(subnets, network.Subnet{
SubnetPropertiesFormat: &network.SubnetPropertiesFormat{
AddressPrefix: pointer.StringPtr(v1alpha4.DefaultAzureBastionSubnetCIDR),
},
Name: pointer.StringPtr(v1alpha4.DefaultAzureBastionSubnetName),
})
vnetFuture, err := vnetClient.CreateOrUpdate(ctx, groupName, os.Getenv(AzureVNetName), network.VirtualNetwork{
Location: pointer.StringPtr(os.Getenv(AzureLocation)),
VirtualNetworkPropertiesFormat: &network.VirtualNetworkPropertiesFormat{
AddressSpace: &network.AddressSpace{
AddressPrefixes: &[]string{vnetCidr},
},
Subnets: &subnets,
},
})
if err != nil {
fmt.Print(err.Error())
}
Expect(err).To(BeNil())
err = vnetFuture.WaitForCompletionRef(ctx, vnetClient.Client)
Expect(err).To(BeNil())
}
| {
var (
specName = "azure-private-cluster"
input AzurePrivateClusterSpecInput
publicClusterProxy framework.ClusterProxy
publicNamespace *corev1.Namespace
publicCancelWatches context.CancelFunc
cluster *clusterv1.Cluster
clusterName string
)
input = inputGetter()
Expect(input).ToNot(BeNil())
Expect(input.BootstrapClusterProxy).NotTo(BeNil(), "Invalid argument. input.BootstrapClusterProxy can't be nil when calling %s spec", specName)
Expect(input.Namespace).NotTo(BeNil(), "Invalid argument. input.Namespace can't be nil when calling %s spec", specName)
By("creating a Kubernetes client to the workload cluster")
publicClusterProxy = input.BootstrapClusterProxy.GetWorkloadCluster(ctx, input.Namespace.Name, input.ClusterName)
Byf("Creating a namespace for hosting the %s test spec", specName)
Logf("starting to create namespace for hosting the %s test spec", specName)
publicNamespace, publicCancelWatches = framework.CreateNamespaceAndWatchEvents(ctx, framework.CreateNamespaceAndWatchEventsInput{
Creator: publicClusterProxy.GetClient(),
ClientSet: publicClusterProxy.GetClientSet(),
Name: input.Namespace.Name,
LogFolder: filepath.Join(input.ArtifactFolder, "clusters", input.ClusterName),
})
Expect(publicNamespace).NotTo(BeNil())
Expect(publicCancelWatches).NotTo(BeNil())
By("Initializing the workload cluster")
clusterctl.InitManagementClusterAndWatchControllerLogs(ctx, clusterctl.InitManagementClusterAndWatchControllerLogsInput{
ClusterProxy: publicClusterProxy,
ClusterctlConfigPath: input.ClusterctlConfigPath,
InfrastructureProviders: input.E2EConfig.InfrastructureProviders(),
LogFolder: filepath.Join(input.ArtifactFolder, "clusters", input.ClusterName),
}, input.E2EConfig.GetIntervals(specName, "wait-controllers")...)
By("Ensure public API server is stable before creating private cluster")
Consistently(func() error {
kubeSystem := &corev1.Namespace{}
return publicClusterProxy.GetClient().Get(ctx, client.ObjectKey{Name: "kube-system"}, kubeSystem)
}, "5s", "100ms").Should(BeNil(), "Failed to assert public API server stability")
By("Creating a private workload cluster")
clusterName = fmt.Sprintf("capz-e2e-%s", util.RandomString(6))
Expect(os.Setenv(AzureInternalLBIP, "10.128.0.100")).NotTo(HaveOccurred())
result := &clusterctl.ApplyClusterTemplateAndWaitResult{}
clusterctl.ApplyClusterTemplateAndWait(ctx, clusterctl.ApplyClusterTemplateAndWaitInput{
ClusterProxy: publicClusterProxy,
ConfigCluster: clusterctl.ConfigClusterInput{
LogFolder: filepath.Join(input.ArtifactFolder, "clusters", publicClusterProxy.GetName()),
ClusterctlConfigPath: input.ClusterctlConfigPath,
KubeconfigPath: publicClusterProxy.GetKubeconfigPath(),
InfrastructureProvider: clusterctl.DefaultInfrastructureProvider,
Flavor: "private",
Namespace: input.Namespace.Name,
ClusterName: clusterName,
KubernetesVersion: input.E2EConfig.GetVariable(capi_e2e.KubernetesVersion),
ControlPlaneMachineCount: pointer.Int64Ptr(3),
WorkerMachineCount: pointer.Int64Ptr(1),
},
WaitForClusterIntervals: input.E2EConfig.GetIntervals(specName, "wait-cluster"),
WaitForControlPlaneIntervals: input.E2EConfig.GetIntervals(specName, "wait-control-plane"),
WaitForMachineDeployments: input.E2EConfig.GetIntervals(specName, "wait-worker-nodes"),
}, result)
cluster = result.Cluster
Expect(cluster).ToNot(BeNil())
// Check that azure bastion is provisioned successfully.
{
settings, err := auth.GetSettingsFromEnvironment()
Expect(err).To(BeNil())
azureBastionClient := network.NewBastionHostsClient(settings.GetSubscriptionID())
azureBastionClient.Authorizer, err = settings.GetAuthorizer()
Expect(err).To(BeNil())
groupName := os.Getenv(AzureResourceGroup)
azureBastionName := fmt.Sprintf("%s-azure-bastion", clusterName)
backoff := wait.Backoff{
Duration: retryBackoffInitialDuration,
Factor: retryBackoffFactor,
Jitter: retryBackoffJitter,
Steps: retryBackoffSteps,
}
retryFn := func() (bool, error) {
bastion, err := azureBastionClient.Get(ctx, groupName, azureBastionName)
if err != nil {
return false, err
}
switch bastion.ProvisioningState {
case network.Succeeded:
return true, nil
case network.Updating:
// Wait for operation to complete.
return false, nil
default:
return false, errors.Errorf("Azure Bastion provisioning failed with state: %q", bastion.ProvisioningState)
}
}
err = wait.ExponentialBackoff(backoff, retryFn)
Expect(err).To(BeNil())
}
} |
actix.rs | #[cfg(feature = "actix")]
mod actix {
use std::env;
use actix_web::{test, App};
use matrix_sdk_appservice::*;
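// Builds an `Appservice` pointed at the mockito mock homeserver, with verbose
// logging for the crates under test; the registration comes from a bundled
// YAML file.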
async fn appservice() -> Appservice {
env::set_var("RUST_LOG", "mockito=debug,matrix_sdk=debug,ruma=debug,actix_web=debug");
let _ = tracing_subscriber::fmt::try_init();
Appservice::new(
mockito::server_url().as_ref(),
"test.local",
AppserviceRegistration::try_from_yaml_str(include_str!("./registration.yaml")).unwrap(),
)
.await
.unwrap()
}
#[actix_rt::test]
async fn test_transactions() {
let appservice = appservice().await;
let app = test::init_service(App::new().service(appservice.actix_service())).await;
let transactions = r#"{
"events": [
{
"content": {},
"type": "m.dummy"
}
]
}"#;
let transactions: serde_json::Value = serde_json::from_str(transactions).unwrap();
let req = test::TestRequest::put()
.uri("/_matrix/app/v1/transactions/1?access_token=hs_token")
.set_json(&transactions)
.to_request();
let resp = test::call_service(&app, req).await;
assert_eq!(resp.status(), 200);
}
#[actix_rt::test]
async fn test_users() {
let appservice = appservice().await;
let app = test::init_service(App::new().service(appservice.actix_service())).await;
let req = test::TestRequest::get()
.uri("/_matrix/app/v1/users/%40_botty_1%3Adev.famedly.local?access_token=hs_token")
.to_request();
let resp = test::call_service(&app, req).await;
assert_eq!(resp.status(), 200);
}
#[actix_rt::test]
async fn test_invalid_access_token() {
let appservice = appservice().await;
let app = test::init_service(App::new().service(appservice.actix_service())).await;
let transactions = r#"{
"events": [
{
"content": {},
"type": "m.dummy"
}
]
}"#;
let transactions: serde_json::Value = serde_json::from_str(transactions).unwrap();
let req = test::TestRequest::put()
.uri("/_matrix/app/v1/transactions/1?access_token=invalid_token")
.set_json(&transactions)
.to_request();
let resp = test::call_service(&app, req).await;
assert_eq!(resp.status(), 401);
}
#[actix_rt::test]
async fn | () {
let appservice = appservice().await;
let app = test::init_service(App::new().service(appservice.actix_service())).await;
let transactions = r#"{
"events": [
{
"content": {},
"type": "m.dummy"
}
]
}"#;
let transactions: serde_json::Value = serde_json::from_str(transactions).unwrap();
let req = test::TestRequest::put()
.uri("/_matrix/app/v1/transactions/1")
.set_json(&transactions)
.to_request();
let resp = test::call_service(&app, req).await;
// TODO: this should actually return a 401 but is 500 because something in the
// extractor fails
assert_eq!(resp.status(), 500);
}
}
| test_no_access_token |
docker_compose.py | import importlib
import logging
from zygoat.constants import Phases, Projects
from zygoat.components import Component
from zygoat.config import yaml
from . import resources
log = logging.getLogger()
file_name = 'docker-compose.yml'
class DockerCompose(Component):
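"""Keeps the repo's docker-compose.yml in sync with zygoat's bundled
service definitions: create/update merge them in, delete strips the
backend and db services back out."""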
def _dump_config(self, data):
with open(file_name, 'w') as root_config:
yaml.dump(data, root_config)
def _load_config(self):
with open(file_name) as root_config:
return yaml.load(root_config.read())
def create(self):
log.info(f'Reading {file_name} from the repo')
config = self._load_config()
config['services'].update(yaml.load(importlib.resources.read_text(resources, file_name)))
log.info('Dumping updated docker-compose config')
self._dump_config(config)
def update(self):
self.call_phase(Phases.CREATE, force_create=True)
def | (self):
config = self._load_config()
log.info('Removing backend and DB services from config')
del config['services'][Projects.BACKEND]
del config['services']['db']
log.info('Dumping updated docker-compose config')
self._dump_config(config)
@property
def installed(self):
services = self._load_config()['services']
return Projects.BACKEND in services and 'db' in services
docker_compose = DockerCompose()
| delete |
spider.py | # coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Spider: A Large-Scale Human-Labeled Dataset for Text-to-SQL Tasks"""
import json
import os
import datasets
logger = datasets.logging.get_logger(__name__)
_CITATION = """\
@article{yu2018spider,
title={Spider: A large-scale human-labeled dataset for complex and cross-domain semantic parsing and text-to-sql task},
author={Yu, Tao and Zhang, Rui and Yang, Kai and Yasunaga, Michihiro and Wang, Dongxu and Li, Zifan and Ma, James and Li, Irene and Yao, Qingning and Roman, Shanelle and others},
journal={arXiv preprint arXiv:1809.08887},
year={2018}
}
"""
_DESCRIPTION = """\
Spider is a large-scale complex and cross-domain semantic parsing and text-to-SQL dataset annotated by 11 college students
"""
_HOMEPAGE = "https://yale-lily.github.io/spider"
_LICENSE = "CC BY-SA 4.0"
_URL = "https://huggingface.co/datasets/spider/resolve/main/data/spider.zip"
class | (datasets.GeneratorBasedBuilder):
VERSION = datasets.Version("1.0.0")
BUILDER_CONFIGS = [
datasets.BuilderConfig(
name="spider",
version=VERSION,
description="Spider: A Large-Scale Human-Labeled Dataset for Text-to-SQL Tasks",
),
]
def _info(self):
features = datasets.Features(
{
"db_id": datasets.Value("string"),
"query": datasets.Value("string"),
"question": datasets.Value("string"),
"query_toks": datasets.features.Sequence(datasets.Value("string")),
"query_toks_no_value": datasets.features.Sequence(datasets.Value("string")),
"question_toks": datasets.features.Sequence(datasets.Value("string")),
}
)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
supervised_keys=None,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
downloaded_filepath = dl_manager.download_and_extract(_URL)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"data_filepath": os.path.join(downloaded_filepath, "spider/train_spider.json"),
},
),
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
gen_kwargs={
"data_filepath": os.path.join(downloaded_filepath, "spider/dev.json"),
},
),
]
def _generate_examples(self, data_filepath):
"""This function returns the examples in the raw (text) form."""
logger.info("generating examples from = %s", data_filepath)
with open(data_filepath, encoding="utf-8") as f:
spider = json.load(f)
for idx, sample in enumerate(spider):
yield idx, {
"db_id": sample["db_id"],
"query": sample["query"],
"question": sample["question"],
"query_toks": sample["query_toks"],
"query_toks_no_value": sample["query_toks_no_value"],
"question_toks": sample["question_toks"],
}
| Spider |
collapse.js | /*!
* Boosted v4.5.3 (https://boosted.orange.com)
* Copyright 2014-2020 The Boosted Authors
* Copyright 2014-2020 Orange
* Licensed under MIT (https://github.com/orange-opensource/orange-boosted-bootstrap/blob/master/LICENSE)
* This is a fork of Bootstrap: initial license below
* Bootstrap collapse.js v4.5.3 (https://boosted.orange.com)
* Copyright 2011-2020 The Boosted Authors (https://github.com/Orange-OpenSource/Orange-Boosted-Bootstrap/graphs/contributors)
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('jquery'), require('./util.js')) :
typeof define === 'function' && define.amd ? define(['jquery', './util.js'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.Collapse = factory(global.jQuery, global.Util));
}(this, (function ($, Util) { 'use strict';
function | (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
var Util__default = /*#__PURE__*/_interopDefaultLegacy(Util);
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME = 'collapse';
var VERSION = '4.5.3';
var DATA_KEY = 'bs.collapse';
var EVENT_KEY = "." + DATA_KEY;
var DATA_API_KEY = '.data-api';
var JQUERY_NO_CONFLICT = $__default['default'].fn[NAME];
var Default = {
toggle: true,
parent: ''
};
var DefaultType = {
toggle: 'boolean',
parent: '(string|element)'
};
var EVENT_SHOW = "show" + EVENT_KEY;
var EVENT_SHOWN = "shown" + EVENT_KEY;
var EVENT_HIDE = "hide" + EVENT_KEY;
var EVENT_HIDDEN = "hidden" + EVENT_KEY;
var EVENT_CLICK_DATA_API = "click" + EVENT_KEY + DATA_API_KEY;
var CLASS_NAME_SHOW = 'show';
var CLASS_NAME_COLLAPSE = 'collapse';
var CLASS_NAME_COLLAPSING = 'collapsing';
var CLASS_NAME_COLLAPSED = 'collapsed';
var DIMENSION_WIDTH = 'width';
var DIMENSION_HEIGHT = 'height';
var SELECTOR_ACTIVES = '*:not(.multi) > .show, *:not(.multi) > .collapsing'; // Boosted mod
var SELECTOR_DATA_TOGGLE = '[data-toggle="collapse"]';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
var Collapse = /*#__PURE__*/function () {
function Collapse(element, config) {
this._isTransitioning = false;
this._element = element;
this._config = this._getConfig(config);
this._triggerArray = [].slice.call(document.querySelectorAll("[data-toggle=\"collapse\"][href=\"#" + element.id + "\"]," + ("[data-toggle=\"collapse\"][data-target=\"#" + element.id + "\"]")));
var toggleList = [].slice.call(document.querySelectorAll(SELECTOR_DATA_TOGGLE));
for (var i = 0, len = toggleList.length; i < len; i++) {
var elem = toggleList[i];
var selector = Util__default['default'].getSelectorFromElement(elem);
var filterElement = [].slice.call(document.querySelectorAll(selector)).filter(function (foundElem) {
return foundElem === element;
});
if (selector !== null && filterElement.length > 0) {
this._selector = selector;
this._triggerArray.push(elem);
}
}
this._parent = this._config.parent ? this._getParent() : null;
if (!this._config.parent) {
this._addAriaAndCollapsedClass(this._element, this._triggerArray);
}
if (this._config.toggle) {
this.toggle();
}
} // Getters
var _proto = Collapse.prototype;
// Public
_proto.toggle = function toggle() {
if ($__default['default'](this._element).hasClass(CLASS_NAME_SHOW)) {
this.hide();
} else {
this.show();
}
};
_proto.show = function show() {
var _this = this;
if (this._isTransitioning || $__default['default'](this._element).hasClass(CLASS_NAME_SHOW)) {
return;
}
var actives;
var activesData;
if (this._parent) {
actives = [].slice.call(this._parent.querySelectorAll(SELECTOR_ACTIVES)).filter(function (elem) {
if (typeof _this._config.parent === 'string') {
return elem.getAttribute('data-parent') === _this._config.parent;
}
return elem.classList.contains(CLASS_NAME_COLLAPSE);
});
if (actives.length === 0) {
actives = null;
}
}
if (actives) {
activesData = $__default['default'](actives).not(this._selector).data(DATA_KEY);
if (activesData && activesData._isTransitioning) {
return;
}
}
var startEvent = $__default['default'].Event(EVENT_SHOW);
$__default['default'](this._element).trigger(startEvent);
if (startEvent.isDefaultPrevented()) {
return;
}
if (actives) {
Collapse._jQueryInterface.call($__default['default'](actives).not(this._selector), 'hide');
if (!activesData) {
$__default['default'](actives).data(DATA_KEY, null);
}
}
var dimension = this._getDimension();
$__default['default'](this._element).removeClass(CLASS_NAME_COLLAPSE).addClass(CLASS_NAME_COLLAPSING);
this._element.style[dimension] = 0;
if (this._triggerArray.length) {
$__default['default'](this._triggerArray).removeClass(CLASS_NAME_COLLAPSED).attr('aria-expanded', true);
}
this.setTransitioning(true);
var complete = function complete() {
$__default['default'](_this._element).removeClass(CLASS_NAME_COLLAPSING).addClass(CLASS_NAME_COLLAPSE + " " + CLASS_NAME_SHOW);
_this._element.style[dimension] = '';
_this.setTransitioning(false);
$__default['default'](_this._element).trigger(EVENT_SHOWN);
};
var capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);
var scrollSize = "scroll" + capitalizedDimension;
var transitionDuration = Util__default['default'].getTransitionDurationFromElement(this._element);
$__default['default'](this._element).one(Util__default['default'].TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
this._element.style[dimension] = this._element[scrollSize] + "px";
};
_proto.hide = function hide() {
var _this2 = this;
if (this._isTransitioning || !$__default['default'](this._element).hasClass(CLASS_NAME_SHOW)) {
return;
}
var startEvent = $__default['default'].Event(EVENT_HIDE);
$__default['default'](this._element).trigger(startEvent);
if (startEvent.isDefaultPrevented()) {
return;
}
var dimension = this._getDimension();
this._element.style[dimension] = this._element.getBoundingClientRect()[dimension] + "px";
Util__default['default'].reflow(this._element);
$__default['default'](this._element).addClass(CLASS_NAME_COLLAPSING).removeClass(CLASS_NAME_COLLAPSE + " " + CLASS_NAME_SHOW);
var triggerArrayLength = this._triggerArray.length;
if (triggerArrayLength > 0) {
for (var i = 0; i < triggerArrayLength; i++) {
var trigger = this._triggerArray[i];
var selector = Util__default['default'].getSelectorFromElement(trigger);
if (selector !== null) {
var $elem = $__default['default']([].slice.call(document.querySelectorAll(selector)));
if (!$elem.hasClass(CLASS_NAME_SHOW)) {
$__default['default'](trigger).addClass(CLASS_NAME_COLLAPSED).attr('aria-expanded', false);
}
}
}
}
this.setTransitioning(true);
var complete = function complete() {
_this2.setTransitioning(false);
$__default['default'](_this2._element).removeClass(CLASS_NAME_COLLAPSING).addClass(CLASS_NAME_COLLAPSE).trigger(EVENT_HIDDEN);
};
this._element.style[dimension] = '';
var transitionDuration = Util__default['default'].getTransitionDurationFromElement(this._element);
$__default['default'](this._element).one(Util__default['default'].TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
};
_proto.setTransitioning = function setTransitioning(isTransitioning) {
this._isTransitioning = isTransitioning;
};
_proto.dispose = function dispose() {
$__default['default'].removeData(this._element, DATA_KEY);
this._config = null;
this._parent = null;
this._element = null;
this._triggerArray = null;
this._isTransitioning = null;
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _extends({}, Default, config);
config.toggle = Boolean(config.toggle); // Coerce string values
Util__default['default'].typeCheckConfig(NAME, config, DefaultType);
return config;
};
_proto._getDimension = function _getDimension() {
var hasWidth = $__default['default'](this._element).hasClass(DIMENSION_WIDTH);
return hasWidth ? DIMENSION_WIDTH : DIMENSION_HEIGHT;
};
_proto._getParent = function _getParent() {
var _this3 = this;
var parent;
if (Util__default['default'].isElement(this._config.parent)) {
parent = this._config.parent; // It's a jQuery object
if (typeof this._config.parent.jquery !== 'undefined') {
parent = this._config.parent[0];
}
} else {
parent = document.querySelector(this._config.parent);
}
var selector = "[data-toggle=\"collapse\"][data-parent=\"" + this._config.parent + "\"]";
var children = [].slice.call(parent.querySelectorAll(selector));
$__default['default'](children).each(function (i, element) {
_this3._addAriaAndCollapsedClass(Collapse._getTargetFromElement(element), [element]);
});
return parent;
};
_proto._addAriaAndCollapsedClass = function _addAriaAndCollapsedClass(element, triggerArray) {
var isOpen = $__default['default'](element).hasClass(CLASS_NAME_SHOW);
if (triggerArray.length) {
$__default['default'](triggerArray).toggleClass(CLASS_NAME_COLLAPSED, !isOpen).attr('aria-expanded', isOpen);
}
} // Static
;
Collapse._getTargetFromElement = function _getTargetFromElement(element) {
var selector = Util__default['default'].getSelectorFromElement(element);
return selector ? document.querySelector(selector) : null;
};
Collapse._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var $element = $__default['default'](this);
var data = $element.data(DATA_KEY);
var _config = _extends({}, Default, $element.data(), typeof config === 'object' && config ? config : {});
if (!data && _config.toggle && typeof config === 'string' && /show|hide|init/.test(config)) {
// Boosted mod
_config.toggle = false;
}
if (!data) {
data = new Collapse(this, _config);
$element.data(DATA_KEY, data);
}
if (typeof config === 'string' && config !== 'init') {
// Boosted mod
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(Collapse, null, [{
key: "VERSION",
get: function get() {
return VERSION;
}
}, {
key: "Default",
get: function get() {
return Default;
}
}]);
return Collapse;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$__default['default'](document).on(EVENT_CLICK_DATA_API, SELECTOR_DATA_TOGGLE, function (event) {
// preventDefault only for <a> elements (which change the URL) not inside the collapsible element
if (event.currentTarget.tagName === 'A') {
event.preventDefault();
}
var $trigger = $__default['default'](this);
var selector = Util__default['default'].getSelectorFromElement(this);
var selectors = [].slice.call(document.querySelectorAll(selector));
$__default['default'](selectors).each(function () {
var $target = $__default['default'](this);
var data = $target.data(DATA_KEY);
var config = data ? 'toggle' : $trigger.data();
Collapse._jQueryInterface.call($target, config);
});
}) // Boosted mod
.on('DOMContentLoaded', function () {
$__default['default'](SELECTOR_DATA_TOGGLE).each(function () {
var target = Collapse._getTargetFromElement(this);
Collapse._jQueryInterface.call($__default['default'](target), 'init');
});
}); // end mod
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$__default['default'].fn[NAME] = Collapse._jQueryInterface;
$__default['default'].fn[NAME].Constructor = Collapse;
$__default['default'].fn[NAME].noConflict = function () {
$__default['default'].fn[NAME] = JQUERY_NO_CONFLICT;
return Collapse._jQueryInterface;
};
return Collapse;
})));
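/**
 * Usage sketch (illustrative, not part of the original bundle): the data API
 * above activates markup such as
 *   <button data-toggle="collapse" data-target="#demo">Toggle</button>
 *   <div id="demo" class="collapse">Content</div>
 * and the jQuery interface can drive the same element programmatically:
 *   $('#demo').collapse('toggle');
 */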
//# sourceMappingURL=collapse.js.map
| _interopDefaultLegacy |
storage-info.go | // +build ignore
//
// MinIO Object Storage (c) 2021 MinIO, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0 | // Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package main
import (
"context"
"log"
"github.com/minio/madmin-go"
)
func main() {
	// Note: YOUR-ACCESSKEYID and YOUR-SECRETACCESSKEY are
	// dummy values, please replace them with original values.
	// API requests are secure (HTTPS) if secure=true and insecure (HTTP) otherwise.
	// New returns a MinIO Admin client object.
madmClnt, err := madmin.New("your-minio.example.com:9000", "YOUR-ACCESSKEYID", "YOUR-SECRETACCESSKEY", true)
if err != nil {
log.Fatalln(err)
}
st, err := madmClnt.StorageInfo(context.Background())
if err != nil {
log.Fatalln(err)
}
log.Printf("%+v\n", st)
} | // |
request.ts | interface RequestOptions {
body?: string | object;
headers?: {[name: string]: string};
}
interface MockResponse {
method: string;
url: string;
options: RequestOptions; | }
export async function request(
method: string,
url: string,
options: RequestOptions,
): Promise<string> {
return new Promise<string>((resolve) => {
setTimeout(() => {
resolve(JSON.stringify({method, url, options}));
});
});
} | |
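
// Illustrative usage sketch (not part of the original module); the method, URL
// and header values are assumptions for demonstration. The mock echoes its
// inputs back, so the parsed object matches the MockResponse shape above.
export async function demoRequest(): Promise<MockResponse> {
    const raw = await request('GET', '/api/items', {headers: {Accept: 'application/json'}});
    return JSON.parse(raw) as MockResponse;
}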
common_tags.py | from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
register = template.Library()
# settings value
@register.simple_tag
def settings_value(name):
| defaults = {
'SITE_HEADER': '<b>Map</b>Ground',
'SITE_TITLE': 'MapGround'
}
if name in defaults:
return mark_safe(getattr(settings, name, defaults[name]))
else:
return '' |
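
# Usage sketch (illustrative): in a template, after {% load common_tags %},
# render a value with {% settings_value "SITE_HEADER" %}. Names present in the
# defaults above fall back to those defaults when absent from settings.py;
# any other name yields the empty string.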
|
metasrv_flight_tls.rs | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_base::tokio;
use common_exception::ErrorCode;
use common_flight_rpc::FlightClientTlsConfig;
use common_meta_api::MetaApi;
use common_meta_flight::MetaFlightClient;
use pretty_assertions::assert_eq;
use crate::init_meta_ut;
use crate::tests::service::MetaSrvTestContext;
use crate::tests::start_metasrv_with_context;
use crate::tests::tls_constants::TEST_CA_CERT;
use crate::tests::tls_constants::TEST_CN_NAME;
use crate::tests::tls_constants::TEST_SERVER_CERT;
use crate::tests::tls_constants::TEST_SERVER_KEY;
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_tls_server() -> anyhow::Result<()> {
let (_log_guards, ut_span) = init_meta_ut!();
let _ent = ut_span.enter();
let mut tc = MetaSrvTestContext::new(0);
tc.config.flight_tls_server_key = TEST_SERVER_KEY.to_owned();
tc.config.flight_tls_server_cert = TEST_SERVER_CERT.to_owned();
let r = start_metasrv_with_context(&mut tc).await;
assert!(r.is_ok());
let addr = tc.config.flight_api_address.clone();
let tls_conf = FlightClientTlsConfig {
rpc_tls_server_root_ca_cert: TEST_CA_CERT.to_string(),
domain_name: TEST_CN_NAME.to_string(),
};
let client =
MetaFlightClient::with_tls_conf(addr.as_str(), "root", "xxx", None, Some(tls_conf)).await?;
let r = client
.get_table(("do not care", "do not care").into())
.await;
assert!(r.is_err());
Ok(()) | async fn test_tls_server_config_failure() -> anyhow::Result<()> {
let (_log_guards, ut_span) = init_meta_ut!();
let _ent = ut_span.enter();
let mut tc = MetaSrvTestContext::new(0);
tc.config.flight_tls_server_key = "../tests/data/certs/not_exist.key".to_owned();
tc.config.flight_tls_server_cert = "../tests/data/certs/not_exist.pem".to_owned();
let r = start_metasrv_with_context(&mut tc).await;
assert!(r.is_err());
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_tls_client_config_failure() -> anyhow::Result<()> {
let (_log_guards, ut_span) = init_meta_ut!();
let _ent = ut_span.enter();
let tls_conf = FlightClientTlsConfig {
rpc_tls_server_root_ca_cert: "../tests/data/certs/not_exist.pem".to_string(),
domain_name: TEST_CN_NAME.to_string(),
};
let r = MetaFlightClient::with_tls_conf("addr", "root", "xxx", None, Some(tls_conf)).await;
assert!(r.is_err());
if let Err(e) = r {
assert_eq!(e.code(), ErrorCode::TLSConfigurationFailure("").code());
}
Ok(())
} | }
#[tokio::test(flavor = "multi_thread", worker_threads = 1)] |
azdo.py | import os
def | (outputlist, jsonObject):
"""
This function convert a dict to Azure DevOps pipelines variable
outputlist : dict { terraform_output : azure devpops variable}
jsonOject : the terraform output in Json format (terraform output -json)
"""
if(len(outputlist) > 0):
for k, v in outputlist.items():
tfoutput_name = k
azdovar = str(v)
if tfoutput_name in jsonObject.keys():
var_value = jsonObject[tfoutput_name]["value"]
print(
"Run [echo ##vso[task.setvariable variable="+azdovar+";]"+var_value+"]")
os.system(
"echo ##vso[task.setvariable variable="+azdovar+";]"+var_value+"")
else:
print("key {} is not present in terraform output".format(
tfoutput_name))
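
# Illustrative usage sketch (not part of the original file): the output name
# "vm_ip" and the pipeline variable "VM_IP" are assumed values.
if __name__ == "__main__":
    import json
    import subprocess
    tf_json = json.loads(subprocess.check_output(["terraform", "output", "-json"]))
    tfoutputtoAzdo({"vm_ip": "VM_IP"}, tf_json)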
| tfoutputtoAzdo |
nexus_model_fetch_result.go | // Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// NexusModelFetchResult nexus model fetch result
//
// swagger:model NexusModelFetchResult
type NexusModelFetchResult struct {
// at next link
AtNextLink string `json:"@nextLink,omitempty"`
// at recordset count
AtRecordsetCount int32 `json:"@recordsetCount,omitempty"`
// page key
PageKey string `json:"pageKey,omitempty"`
// value
Value []*NexusModel `json:"value"`
}
// Validate validates this nexus model fetch result
func (m *NexusModelFetchResult) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateValue(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *NexusModelFetchResult) validateValue(formats strfmt.Registry) error {
if swag.IsZero(m.Value) { // not required
return nil
}
for i := 0; i < len(m.Value); i++ {
if swag.IsZero(m.Value[i]) { // not required
continue
}
if m.Value[i] != nil {
if err := m.Value[i].Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("value" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("value" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
// ContextValidate validate this nexus model fetch result based on the context it is used
func (m *NexusModelFetchResult) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
var res []error
if err := m.contextValidateValue(ctx, formats); err != nil {
res = append(res, err)
}
if len(res) > 0 |
return nil
}
func (m *NexusModelFetchResult) contextValidateValue(ctx context.Context, formats strfmt.Registry) error {
for i := 0; i < len(m.Value); i++ {
if m.Value[i] != nil {
if err := m.Value[i].ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("value" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("value" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
// MarshalBinary interface implementation
func (m *NexusModelFetchResult) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *NexusModelFetchResult) UnmarshalBinary(b []byte) error {
var res NexusModelFetchResult
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}
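
// Usage sketch (illustrative, not part of the generated file): decode a raw
// payload into the model, then validate it with the default format registry.
//
//	var result NexusModelFetchResult
//	if err := result.UnmarshalBinary(payload); err == nil {
//		err = result.Validate(strfmt.Default)
//	}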
| {
return errors.CompositeValidationError(res...)
} |
thorlabs_mpc320.py | """ Module for controlling Thorlabs motorized polarization paddles """
import ctypes
from ctypes import Structure
import time
from pylabnet.utils.logging.logger import LogHandler
#from comtypes.typeinfo import SAFEARRAYABOUND
#enum FT_Status
FT_OK = ctypes.c_short(0x00)
FT_InvalidHandle = ctypes.c_short(0x0)
FT_DeviceNotFound = ctypes.c_short(0x02)
FT_DeviceNotOpened = ctypes.c_short(0x03)
FT_IOError = ctypes.c_short(0x04)
FT_InsufficientResources = ctypes.c_short(0x05)
FT_InvalidParameter = ctypes.c_short(0x06)
FT_DeviceNotPresent = ctypes.c_short(0x07)
FT_IncorrectDevice = ctypes.c_short(0x08)
FT_Status = ctypes.c_short
#enum MOT_MotorTypes
MOT_NotMotor = ctypes.c_int(0)
MOT_DCMotor = ctypes.c_int(1)
MOT_StepperMotor = ctypes.c_int(2)
MOT_BrushlessMotor = ctypes.c_int(3)
MOT_CustomMotor = ctypes.c_int(100)
MOT_MotorTypes = ctypes.c_int
#enum POL_Paddle
paddle1 = ctypes.c_uint16(1)
paddle2 = ctypes.c_uint16(2)
paddle3 = ctypes.c_uint16(3)
POL_Paddles = ctypes.c_uint16
#enum POL_PaddleBits
none_ctype = ctypes.c_ushort(0x0) #is None in header file
PaddleBit1 = ctypes.c_ushort(0x01)
PaddleBit2 = ctypes.c_ushort(0x02)
PaddleBit4 = ctypes.c_ushort(0x04)
AllPaddles = ctypes.c_ushort(0x07)
POL_PaddleBits = ctypes.c_ushort
#enum MOT_TravelDirection
MOT_TravelDirectionDisabled = ctypes.c_short(0x00)
MOT_Forwards = ctypes.c_short(0x01)
MOT_Reverse = ctypes.c_short(0x02)
MOT_TravelDirection = ctypes.c_short
#enum MPC_IOModes
MPC_ToggleOnPositiveEdge = ctypes.c_ulong(0x01)
MPC_SetPositionOnPositiveEdge = ctypes.c_ulong(0x02)
MPC_OutputHighAtSetPosition = ctypes.c_ulong(0x04)
MPC_OutputHighWhemMoving = ctypes.c_ulong(0x08)
MPC_IOModes = ctypes.c_ulong
class TLI_DeviceInfo(Structure):
_fields_ = [("typeID", ctypes.c_ulong),
("description", (65 * ctypes.c_char)), #changed from 65* _char
("serialNo", (9 * ctypes.c_char)), #changed from 9* _char
("PID", ctypes.c_ulong),# wintypes.DWORD
("isKnownType", ctypes.c_bool),
("motorType", MOT_MotorTypes),
("isPiezoDevice", ctypes.c_bool),
("isLaser", ctypes.c_bool),
("isCustomType", ctypes.c_bool),
("isRack", ctypes.c_bool),
("maxPaddles", ctypes.c_short)]
# class TLI_HardwareInformation(Structure):
# _fields_ = [("serialNumber", ctypes.c_ulong),
# ("modelNumber", (8 * ctypes.c_char)),
# ("type", ctypes.c_ushort),
# ("firmwareVersion", ctypes.c_ulong),
# ("notes", (48 * ctypes.c_char)),
# ("deviceDependantData", (12 * ctypes.c_byte)),
# ("hardwareVersion", ctypes.c_ushort),
# ("modificationState", ctypes.c_ushort),
# ("numChannels", ctypes.c_ushort)]
class TLI_PolarizerParameters(Structure):
_fields_ = [("Velocity", ctypes.c_ushort),
("HomePosition", ctypes.c_double),
("JogSize1", ctypes.c_double),
("JogSize2", ctypes.c_double),
("JogSize3", ctypes.c_double)]
#class SAFEARRAYBOUND(Structure):
# _fields_ = [("cElements" , ctypes.c_ulong),
# ("lLbound" , ctypes.c_long)]
#class SAFEARRAY(Structure):
# _fields_ = [("cDims", ctypes.c_ushort),
# ("fFeatures", ctypes.c_ushort),
# ("cbElements", ctypes.c_ulong),
# ("cLocks", ctypes.c_ulong),
# ("pvData", ctypes.c_void_p),
# ("rgsabound", SAFEARRAYBOUND * 1)]
class Driver():
def __init__(self, device_num, logger):
"""Instantiate driver class.
device_num is numbering of devices connected via USB. Drivrt then finds serial numbers of polarization paddle by Driver, e.g. b'38154354' """
# Instantiate log.
self.log = LogHandler(logger=logger)
        #Loads the polarization controller DLL and defines argument and result types for its C functions
self._polarizationdll = ctypes.cdll.LoadLibrary('Thorlabs.MotionControl.Polarizer.dll')
self._devmanagerdll = ctypes.cdll.LoadLibrary('Thorlabs.MotionControl.DeviceManager.dll')
self._configure_functions()
#get device list size
if self._polarizationdll.TLI_BuildDeviceList() == 0:
num_devs = self._polarizationdll.TLI_GetDeviceListSize()
#print(f"There are {num_devs} devices connected")
#Get devices serial numbers
serialNos = ctypes.create_string_buffer(100) #the way to have a mutable buffer
serialNosSize = ctypes.c_ulong(ctypes.sizeof(serialNos))
List = self._polarizationdll.TLI_GetDeviceListByTypeExt(serialNos, serialNosSize, 38)
#if List:
# print("Failed to get device list")
#else:
            #    print("Device list created successfully") #change these messages to interact with logger
self.dev_name = serialNos.value.decode("utf-8") #.strip().split(',')
#print(f"Connected to device {self.dev_name}")
#get device info including serial number
self.device_info = TLI_DeviceInfo() # container for device info
        self._polarizationdll.TLI_GetDeviceInfo(serialNos[(device_num - 1) * 9:(device_num * 9) - 1], ctypes.byref(self.device_info)) #when there are several devices, figure out how to separate and access each one
self.device = serialNos[(device_num - 1) * 9:(device_num * 9) - 1]
#print("Description: ", self.device_info.description)
#print("Serial No: ", self.device_info.serialNo)
#print("Motor Type: ", self.device_info.motorType)
#print("USB PID: ", self.device_info.PID)
#print("Max Number of Paddles: ", self.device_info.maxPaddles)
        #establishing connection to device
self.paddles = [paddle1, paddle3, paddle2]
connection = self._polarizationdll.MPC_Open(self.device)
if connection == 0:
self.log.info(f"Successfully connected to {self.device}.")
else:
self.log.error(f"Connection to {self.device} failed due to error {connection}.")
#technical methods
def _configure_functions(self):
""" Defines arguments and results for c functions """
self._polarizationdll.TLI_BuildDeviceList.argtype = None
self._polarizationdll.TLI_BuildDeviceList.restype = ctypes.c_short
self._polarizationdll.TLI_GetDeviceListSize.argtype = None
self._polarizationdll.TLI_GetDeviceListSize.restype = ctypes.c_short
self._polarizationdll.TLI_GetDeviceInfo.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.POINTER(TLI_DeviceInfo)]
self._polarizationdll.TLI_GetDeviceInfo.restype = ctypes.c_short
self._polarizationdll.TLI_GetDeviceListByTypeExt.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.c_ulong, ctypes.c_int]
self._polarizationdll.TLI_GetDeviceListByTypeExt.restype = ctypes.c_short
self._polarizationdll.MPC_Open.argtype = ctypes.POINTER(ctypes.c_char)
self._polarizationdll.MPC_Open.restype = ctypes.c_short
self._polarizationdll.MPC_Close.argtype = ctypes.POINTER(ctypes.c_char)
self._polarizationdll.MPC_Close.restype = ctypes.c_short
self._polarizationdll.MPC_CheckConnection.argtype = ctypes.c_char_p
self._polarizationdll.MPC_CheckConnection.restype = ctypes.c_bool
self._polarizationdll.MPC_GetPosition.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles]
self._polarizationdll.MPC_GetPosition.restype = ctypes.c_double
self._polarizationdll.MPC_RequestPolParams.argtype = ctypes.POINTER(ctypes.c_char)
self._polarizationdll.MPC_RequestPolParams.restype = ctypes.c_short
self._polarizationdll.MPC_GetPolParams.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.POINTER(TLI_PolarizerParameters)]
self._polarizationdll.MPC_GetPolParams.restype = ctypes.c_short
self._polarizationdll.MPC_SetPolParams.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.POINTER(TLI_PolarizerParameters)]
self._polarizationdll.MPC_SetPolParams.restype = ctypes.c_short
self._polarizationdll.MPC_SetJogSize.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles, ctypes.c_double]
self._polarizationdll.MPC_SetJogSize.restype = ctypes.c_short
self._polarizationdll.MPC_Jog.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles, MOT_TravelDirection]
self._polarizationdll.MPC_Jog.restype = ctypes.c_short
self._polarizationdll.MPC_GetMaxTravel.argtype = ctypes.POINTER(ctypes.c_char)
self._polarizationdll.MPC_GetMaxTravel.restype = ctypes.c_double
self._polarizationdll.MPC_MoveToPosition.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles, ctypes.c_double]
self._polarizationdll.MPC_MoveToPosition.restype = ctypes.c_short
self._polarizationdll.MPC_Stop.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles]
self._polarizationdll.MPC_Stop.restype = ctypes.c_short
self._polarizationdll.MPC_Home.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles]
self._polarizationdll.MPC_Home.restype = ctypes.c_short
self._polarizationdll.MPC_StartPolling.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.c_int]
self._polarizationdll.MPC_StartPolling.restype = ctypes.c_bool
self._polarizationdll.MPC_StopPolling.argtype = ctypes.POINTER(ctypes.c_char)
        self._polarizationdll.MPC_StopPolling.restype = ctypes.c_void_p #did not find a plain (non-pointer) c_void as needed
self._polarizationdll.MPC_SetVelocity.argtypes = [ctypes.POINTER(ctypes.c_char), ctypes.c_short]
self._polarizationdll.MPC_SetVelocity.restype = ctypes.c_short
self._polarizationdll.MPC_MoveRelative.argtypes = [ctypes.POINTER(ctypes.c_char), POL_Paddles, ctypes.c_double]
self._polarizationdll.MPC_MoveRelative.restype = ctypes.c_short
self._polarizationdll.MPC_GetStepsPerDegree.argtype = [ctypes.POINTER(ctypes.c_char)]
self._polarizationdll.MPC_GetStepsPerDegree.result = ctypes.c_double
    #wrapper functions for external use
def open(self):
result = self._polarizationdll.MPC_Open(self.device)
if result == 0:
print("Connected succesfully to device")
else:
print("A problem occured when trying to connect to device")
def close(self):
resultc = self._polarizationdll.MPC_Close(self.device)
if resultc == 0:
print("Closed connection to device")
else:
print("A problem occured when trying to diconnect from device")
def home(self, paddle_num):
home_result = self._polarizationdll.MPC_Home(self.device, self.paddles[paddle_num])
return home_result
def set_velocity(self, velocity):
velocity = self._polarizationdll.MPC_SetVelocity(self.device, velocity)
def | (self, paddle_num, pos, sleep_time):
#posinitial = self._polarizationdll.MPC_GetPosition(self.device, self.paddles[paddle_num])
move_result = self._polarizationdll.MPC_MoveToPosition(self.device, self.paddles[paddle_num], pos)
time.sleep(abs(sleep_time * pos / 170))
#posfinal = self._polarizationdll.MPC_GetPosition(self.device, self.paddles[paddle_num])
return move_result #, posinitial, posfinal
def move_rel(self, paddle_num, step, sleep_time):
#posinitial = self._polarizationdll.MPC_GetPosition(self.device, self.paddles[paddle_num])
move_result = self._polarizationdll.MPC_MoveRelative(self.device, self.paddles[paddle_num], step)
time.sleep(abs(sleep_time * step / 170))
#posfinal = self._polarizationdll.MPC_GetPosition(self.device, self.paddles[paddle_num])
return move_result #, posinitial, posfinal
def get_angle(self, paddle_num):
currentpos = self._polarizationdll.MPC_GetPosition(self.device, self.paddles[paddle_num])
return currentpos
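
# Usage sketch (illustrative; requires a connected MPC320 and a pylabnet logger
# client, and the paddle index / angle / sleep_time below are assumed values):
#   driver = Driver(device_num=1, logger=logger)
#   driver.home(0)
#   driver.move(0, 45.0, sleep_time=5)
#   print(driver.get_angle(0))
#   driver.close()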
| move |
injection.packager.ts | // tslint:disable:no-namespace
// tslint:disable:max-classes-per-file
// tslint:disable:object-literal-sort-keys
declare var Json: any;
export namespace Packager {
export function join(...items: any[]): string | Uint8Array | Error {
if (items instanceof Array && items.length === 1 && items[0] instanceof Array) {
items = items[0];
}
if (!(items instanceof Array) || items.length === 0) {
return new Error(`No arguments provided to join`);
}
const strs: any[] = [];
const bytes: number[] = [];
let isBinary: boolean | undefined;
try {
items.forEach((item: any, i: number) => {
if (item instanceof Uint8Array && (isBinary === undefined || isBinary === true)) {
isBinary = true;
if (i === 0) {
// Set type as array
bytes.push(Json.Scheme.Types.array);
}
// Set length of item
bytes.push(...Json.Impls.Uint32.toUint8(item.length));
// Put item
bytes.push(...item);
} else if (typeof item === 'string' && (isBinary === undefined || isBinary === false)) {
isBinary = false;
strs.push(item);
} else {
                    throw new Error(`Only strings or Uint8Array can be joined. Each array item should be of the same type.`);
}
});
if (isBinary) {
return new Uint8Array(bytes);
}
} catch (error) {
return error;
}
return JSON.stringify(strs);
}
export function | (source: string | Uint8Array): string[] | Uint8Array[] | Error {
if (!isPackage(source)) {
return new Error(`Source isn't a package of protocol data.`);
}
if (source instanceof ArrayBuffer) {
source = new Uint8Array(source);
}
if (source instanceof Uint8Array) {
let buffer = source.slice(1, source.length);
const items: Uint8Array[] = [];
do {
const itemLength = Json.Impls.Uint32.fromUint8(buffer.slice(0, 4));
items.push(buffer.slice(4, 4 + itemLength));
buffer = buffer.slice(4 + itemLength, buffer.length);
} while (buffer.length > 0);
return items;
} else {
return JSON.parse(source) as string[];
}
}
export function isPackage(source: any): boolean {
if (source instanceof Uint8Array) {
return source[0] === Json.Scheme.Types.array;
} else if (source instanceof ArrayBuffer) {
const uint8array: Uint8Array = new Uint8Array(source);
return uint8array.length > 0 ? (uint8array[0] === Json.Scheme.Types.array) : false;
} else if (typeof source === 'string') {
try {
return JSON.parse(source) instanceof Array;
} catch (error) {
return false;
}
} else {
return false;
}
}
}
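
// Usage sketch (illustrative, string mode only; binary mode additionally relies
// on the ambient Json helpers declared above):
//   const packed = Packager.join('alpha', 'beta');   // '["alpha","beta"]'
//   if (typeof packed === 'string' && Packager.isPackage(packed)) {
//       const parts = Packager.split(packed);        // ['alpha', 'beta']
//   }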
| split |
test_re.py | import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from sre import Scanner
import sys, os, traceback
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def | (self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
return # not yet
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
def test_getattr(self):
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
u"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
u"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
    def test_ignore_case_basic(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_bigcharset(self):
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222").group(1), u"\u2222")
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222", re.UNICODE).group(1), u"\u2222")
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
def test_ignore_case(self):
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def test_re_escape(self):
p=""
for i in range(0, 256):
p = p + chr(i)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None,
True)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1))
pat=re.compile(re.escape(p))
self.assertEqual(pat.match(p) is not None, True)
self.assertEqual(pat.match(p).span(), (0,256))
def test_pickling(self):
import pickle
self.pickle_test(pickle)
import cPickle
self.pickle_test(cPickle)
def pickle_test(self, pickle):
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
self.assertRaises(re.error, re.match, "\911", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
# bug described in patches 527371/672491
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
# bug 545855 -- This pattern failed to cause a compile error as it
# should, instead provoking a TypeError.
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
# bugs 418626 at al. -- Testing Greg Chapman's addition of op code
# SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
# pattern '*?' on a long string.
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still recurses and hits the recursion limit
self.assertRaises(RuntimeError, re.search, '(a|b)*?c', 10000*'ab'+'cd')
def test_bug_612074(self):
pat=u"["+re.escape(u"\u2039")+u"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty case that overflows the straightforward recursive
# implementation of repeated groups.
self.assertRaises(RuntimeError, re.match, '(x)*', 50000*'x')
self.assertRaises(RuntimeError, re.match, '(x)*y', 50000*'x'+'y')
self.assertRaises(RuntimeError, re.match, '(x)*?y', 50000*'x'+'y')
def test_scanner(self):
def s_ident(scanner, token): return token
def s_operator(scanner, token): return "op%s" % token
def s_float(scanner, token): return float(token)
def s_int(scanner, token): return int(token)
scanner = Scanner([
(r"[a-zA-Z_]\w*", s_ident),
(r"\d+\.\d*", s_float),
(r"\d+", s_int),
(r"=|\+|-|\*|/", s_operator),
(r"\s+", None),
])
self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
(['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
try:
unicode
except NameError:
return # no problem if we have no unicode
class my_unicode(unicode): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
def run_re_tests():
from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR
if verbose:
print 'Running re_tests test suite'
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError, ('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print '=== Syntax error:', t
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print '*** Unexpected error ***', t
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error, msg:
print '=== Unexpected exception', t, repr(msg)
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print '=== Succeeded incorrectly', t
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
                        # Special hack because otherwise the string concat fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print '=== grouping error', t,
print repr(repl) + ' should be ' + repr(expected)
else:
print '=== Failed incorrectly', t
# Try the match on a unicode string, and check that it
# still succeeds.
try:
result = obj.search(unicode(s, "latin-1"))
if result is None:
print '=== Fails on unicode match', t
except NameError:
continue # 1.5.2
except TypeError:
continue # unicode test case
# Try the match on a unicode pattern, and check that it
# still succeeds.
obj=re.compile(unicode(pattern, "latin-1"))
result = obj.search(s)
if result is None:
print '=== Fails on unicode pattern match', t
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print '=== Failed on range-limited match', t
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print '=== Fails on case-insensitive match', t
# Try the match with LOCALE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print '=== Fails on locale-sensitive match', t
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print '=== Fails on unicode-sensitive match', t
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| test_qualified_re_split |
testes_basicos.py | import unittest
def soma(param, param1):
return param + param1
class BasicoTests(unittest.TestCase):
def test_soma(self):
resultado = soma(1, 2)
self.assertEqual(3, resultado)
resultado = soma(3, 2)
self.assertEqual(5, resultado)
if __name__ == '__main__': | unittest.main() |
|
additional_unattend_content.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AdditionalUnattendContent(Model):
| """Additional XML formatted information that can be included in the
Unattend.xml file, which is used by Windows Setup. Contents are defined by
    setting name, component name, and the pass in which the content is
    applied.
:param pass_name: The pass name. Currently, the only allowable value is
oobeSystem. Possible values include: 'oobeSystem'
:type pass_name: str or :class:`PassNames
<azure.mgmt.compute.compute.v2017_03_30.models.PassNames>`
:param component_name: The component name. Currently, the only allowable
value is Microsoft-Windows-Shell-Setup. Possible values include:
'Microsoft-Windows-Shell-Setup'
:type component_name: str or :class:`ComponentNames
<azure.mgmt.compute.compute.v2017_03_30.models.ComponentNames>`
    :param setting_name: Setting name (e.g. FirstLogonCommands, AutoLogon).
Possible values include: 'AutoLogon', 'FirstLogonCommands'
:type setting_name: str or :class:`SettingNames
<azure.mgmt.compute.compute.v2017_03_30.models.SettingNames>`
:param content: XML formatted content that is added to the unattend.xml
file in the specified pass and component. The XML must be less than 4 KB
and must include the root element for the setting or feature that is being
inserted.
:type content: str
"""
_attribute_map = {
'pass_name': {'key': 'passName', 'type': 'PassNames'},
'component_name': {'key': 'componentName', 'type': 'ComponentNames'},
'setting_name': {'key': 'settingName', 'type': 'SettingNames'},
'content': {'key': 'content', 'type': 'str'},
}
def __init__(self, pass_name=None, component_name=None, setting_name=None, content=None):
self.pass_name = pass_name
self.component_name = component_name
self.setting_name = setting_name
self.content = content |
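
# Usage sketch (illustrative; the AutoLogon XML fragment is an assumed example,
# while the pass/component/setting names come from the docstring above):
if __name__ == "__main__":
    item = AdditionalUnattendContent(
        pass_name='oobeSystem',
        component_name='Microsoft-Windows-Shell-Setup',
        setting_name='AutoLogon',
        content='<AutoLogon><Enabled>true</Enabled></AutoLogon>',
    )
    print(item.setting_name)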
|
UpdateTransitRouterVbrAttachmentAttributeRequest.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class UpdateTransitRouterVbrAttachmentAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'UpdateTransitRouterVbrAttachmentAttribute','cbn')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def | (self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_TransitRouterAttachmentName(self):
return self.get_query_params().get('TransitRouterAttachmentName')
def set_TransitRouterAttachmentName(self,TransitRouterAttachmentName):
self.add_query_param('TransitRouterAttachmentName',TransitRouterAttachmentName)
def get_DryRun(self):
return self.get_query_params().get('DryRun')
def set_DryRun(self,DryRun):
self.add_query_param('DryRun',DryRun)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_TransitRouterAttachmentId(self):
return self.get_query_params().get('TransitRouterAttachmentId')
def set_TransitRouterAttachmentId(self,TransitRouterAttachmentId):
self.add_query_param('TransitRouterAttachmentId',TransitRouterAttachmentId)
def get_TransitRouterAttachmentDescription(self):
return self.get_query_params().get('TransitRouterAttachmentDescription')
def set_TransitRouterAttachmentDescription(self,TransitRouterAttachmentDescription):
self.add_query_param('TransitRouterAttachmentDescription',TransitRouterAttachmentDescription) | set_ResourceOwnerId |
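
# Usage sketch (illustrative; the credentials, region and attachment ID are
# assumed placeholder values):
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = UpdateTransitRouterVbrAttachmentAttributeRequest()
#   request.set_TransitRouterAttachmentId('tr-attach-xxxxxxxxxxxx')
#   request.set_TransitRouterAttachmentName('my-vbr-attachment')
#   response = client.do_action_with_exception(request)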
layer_norm.py | import torch
import torch.nn as nn
class LayerNorm(nn.Module):
|
class T5LayerNorm(nn.Module):
"""
    Construct a layernorm module in the T5 style: no bias and no subtraction of mean.
"""
def __init__(self, hidden_size, eps=1e-6):
super().__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.variance_epsilon = eps
def forward(self, hidden_states):
# layer norm should always be calculated in float32
variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
return self.weight * hidden_states.type_as(self.weight)
| """
Layer Normalization.
https://arxiv.org/abs/1607.06450
"""
def __init__(self, hidden_size, eps=1e-6):
super(LayerNorm, self).__init__()
self.eps = eps
self.gamma = nn.Parameter(torch.ones(hidden_size))
self.beta = nn.Parameter(torch.zeros(hidden_size))
def forward(self, x):
mean = x.mean(-1, keepdim=True)
std = x.std(-1, keepdim=True)
hidden_states = self.gamma * (x-mean) / (std + self.eps)
return hidden_states + self.beta |
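
# Usage sketch (illustrative): both variants normalize over the last dimension
# and preserve the input shape.
if __name__ == "__main__":
    x = torch.randn(2, 4, 768)
    print(LayerNorm(768)(x).shape)    # torch.Size([2, 4, 768])
    print(T5LayerNorm(768)(x).shape)  # torch.Size([2, 4, 768])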
serializers.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package marketplacemetering
import (
"bytes"
"context"
"fmt"
"github.com/aws/aws-sdk-go-v2/service/marketplacemetering/types"
smithy "github.com/aws/smithy-go"
"github.com/aws/smithy-go/encoding/httpbinding"
smithyjson "github.com/aws/smithy-go/encoding/json"
"github.com/aws/smithy-go/middleware"
smithytime "github.com/aws/smithy-go/time"
smithyhttp "github.com/aws/smithy-go/transport/http"
"path"
)
type awsAwsjson11_serializeOpBatchMeterUsage struct {
}
func (*awsAwsjson11_serializeOpBatchMeterUsage) ID() string {
return "OperationSerializer"
}
func (m *awsAwsjson11_serializeOpBatchMeterUsage) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
request, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown transport type %T", in.Request)}
}
input, ok := in.Parameters.(*BatchMeterUsageInput)
_ = input
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown input parameters type %T", in.Parameters)}
}
operationPath := "/"
if len(request.Request.URL.Path) == 0 {
request.Request.URL.Path = operationPath
} else {
request.Request.URL.Path = path.Join(request.Request.URL.Path, operationPath)
if request.Request.URL.Path != "/" && operationPath[len(operationPath)-1] == '/' {
request.Request.URL.Path += "/"
}
}
request.Request.Method = "POST"
httpBindingEncoder, err := httpbinding.NewEncoder(request.URL.Path, request.URL.RawQuery, request.Header)
if err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
httpBindingEncoder.SetHeader("Content-Type").String("application/x-amz-json-1.1")
httpBindingEncoder.SetHeader("X-Amz-Target").String("AWSMPMeteringService.BatchMeterUsage")
jsonEncoder := smithyjson.NewEncoder()
if err := awsAwsjson11_serializeOpDocumentBatchMeterUsageInput(input, jsonEncoder.Value); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request, err = request.SetStream(bytes.NewReader(jsonEncoder.Bytes())); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request.Request, err = httpBindingEncoder.Encode(request.Request); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
in.Request = request
return next.HandleSerialize(ctx, in)
}
type awsAwsjson11_serializeOpMeterUsage struct {
}
func (*awsAwsjson11_serializeOpMeterUsage) ID() string {
return "OperationSerializer"
}
func (m *awsAwsjson11_serializeOpMeterUsage) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
request, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown transport type %T", in.Request)}
}
input, ok := in.Parameters.(*MeterUsageInput)
_ = input
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown input parameters type %T", in.Parameters)}
}
operationPath := "/"
if len(request.Request.URL.Path) == 0 {
request.Request.URL.Path = operationPath
} else {
request.Request.URL.Path = path.Join(request.Request.URL.Path, operationPath)
if request.Request.URL.Path != "/" && operationPath[len(operationPath)-1] == '/' {
request.Request.URL.Path += "/"
}
}
request.Request.Method = "POST"
httpBindingEncoder, err := httpbinding.NewEncoder(request.URL.Path, request.URL.RawQuery, request.Header)
if err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
httpBindingEncoder.SetHeader("Content-Type").String("application/x-amz-json-1.1")
httpBindingEncoder.SetHeader("X-Amz-Target").String("AWSMPMeteringService.MeterUsage")
jsonEncoder := smithyjson.NewEncoder()
if err := awsAwsjson11_serializeOpDocumentMeterUsageInput(input, jsonEncoder.Value); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request, err = request.SetStream(bytes.NewReader(jsonEncoder.Bytes())); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request.Request, err = httpBindingEncoder.Encode(request.Request); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
in.Request = request
return next.HandleSerialize(ctx, in)
}
type awsAwsjson11_serializeOpRegisterUsage struct {
}
func (*awsAwsjson11_serializeOpRegisterUsage) ID() string {
return "OperationSerializer"
}
func (m *awsAwsjson11_serializeOpRegisterUsage) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
request, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown transport type %T", in.Request)}
}
input, ok := in.Parameters.(*RegisterUsageInput)
_ = input
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown input parameters type %T", in.Parameters)}
}
operationPath := "/"
if len(request.Request.URL.Path) == 0 {
request.Request.URL.Path = operationPath
} else {
request.Request.URL.Path = path.Join(request.Request.URL.Path, operationPath)
if request.Request.URL.Path != "/" && operationPath[len(operationPath)-1] == '/' {
request.Request.URL.Path += "/"
}
}
request.Request.Method = "POST"
httpBindingEncoder, err := httpbinding.NewEncoder(request.URL.Path, request.URL.RawQuery, request.Header)
if err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
httpBindingEncoder.SetHeader("Content-Type").String("application/x-amz-json-1.1")
httpBindingEncoder.SetHeader("X-Amz-Target").String("AWSMPMeteringService.RegisterUsage")
jsonEncoder := smithyjson.NewEncoder()
if err := awsAwsjson11_serializeOpDocumentRegisterUsageInput(input, jsonEncoder.Value); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request, err = request.SetStream(bytes.NewReader(jsonEncoder.Bytes())); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request.Request, err = httpBindingEncoder.Encode(request.Request); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
in.Request = request
return next.HandleSerialize(ctx, in)
}
type awsAwsjson11_serializeOpResolveCustomer struct {
}
func (*awsAwsjson11_serializeOpResolveCustomer) ID() string {
return "OperationSerializer"
}
func (m *awsAwsjson11_serializeOpResolveCustomer) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
request, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown transport type %T", in.Request)}
}
input, ok := in.Parameters.(*ResolveCustomerInput)
_ = input
if !ok {
return out, metadata, &smithy.SerializationError{Err: fmt.Errorf("unknown input parameters type %T", in.Parameters)}
}
operationPath := "/"
if len(request.Request.URL.Path) == 0 {
request.Request.URL.Path = operationPath
} else {
request.Request.URL.Path = path.Join(request.Request.URL.Path, operationPath)
if request.Request.URL.Path != "/" && operationPath[len(operationPath)-1] == '/' {
request.Request.URL.Path += "/"
}
}
request.Request.Method = "POST"
httpBindingEncoder, err := httpbinding.NewEncoder(request.URL.Path, request.URL.RawQuery, request.Header)
if err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
httpBindingEncoder.SetHeader("Content-Type").String("application/x-amz-json-1.1")
httpBindingEncoder.SetHeader("X-Amz-Target").String("AWSMPMeteringService.ResolveCustomer")
jsonEncoder := smithyjson.NewEncoder()
if err := awsAwsjson11_serializeOpDocumentResolveCustomerInput(input, jsonEncoder.Value); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request, err = request.SetStream(bytes.NewReader(jsonEncoder.Bytes())); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
if request.Request, err = httpBindingEncoder.Encode(request.Request); err != nil {
return out, metadata, &smithy.SerializationError{Err: err}
}
in.Request = request
return next.HandleSerialize(ctx, in)
}
func awsAwsjson11_serializeDocumentTag(v *types.Tag, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.Key != nil {
ok := object.Key("Key")
ok.String(*v.Key)
}
if v.Value != nil {
ok := object.Key("Value")
ok.String(*v.Value)
}
return nil
}
func awsAwsjson11_serializeDocumentTagList(v []types.Tag, value smithyjson.Value) error {
array := value.Array()
defer array.Close()
for i := range v {
av := array.Value()
if err := awsAwsjson11_serializeDocumentTag(&v[i], av); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeDocumentUsageAllocation(v *types.UsageAllocation, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.AllocatedUsageQuantity != nil {
ok := object.Key("AllocatedUsageQuantity")
ok.Integer(*v.AllocatedUsageQuantity)
}
if v.Tags != nil {
ok := object.Key("Tags")
if err := awsAwsjson11_serializeDocumentTagList(v.Tags, ok); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeDocumentUsageAllocations(v []types.UsageAllocation, value smithyjson.Value) error {
array := value.Array()
defer array.Close()
for i := range v {
av := array.Value()
if err := awsAwsjson11_serializeDocumentUsageAllocation(&v[i], av); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeDocumentUsageRecord(v *types.UsageRecord, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.CustomerIdentifier != nil {
ok := object.Key("CustomerIdentifier")
ok.String(*v.CustomerIdentifier)
}
if v.Dimension != nil {
ok := object.Key("Dimension")
ok.String(*v.Dimension)
}
if v.Quantity != nil {
ok := object.Key("Quantity")
ok.Integer(*v.Quantity)
}
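// Timestamps are serialized on the wire as fractional epoch seconds.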
if v.Timestamp != nil {
ok := object.Key("Timestamp")
ok.Double(smithytime.FormatEpochSeconds(*v.Timestamp))
}
if v.UsageAllocations != nil {
ok := object.Key("UsageAllocations")
if err := awsAwsjson11_serializeDocumentUsageAllocations(v.UsageAllocations, ok); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeDocumentUsageRecordList(v []types.UsageRecord, value smithyjson.Value) error {
array := value.Array()
defer array.Close()
for i := range v {
av := array.Value()
if err := awsAwsjson11_serializeDocumentUsageRecord(&v[i], av); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeOpDocumentBatchMeterUsageInput(v *BatchMeterUsageInput, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.ProductCode != nil {
ok := object.Key("ProductCode")
ok.String(*v.ProductCode)
}
if v.UsageRecords != nil {
ok := object.Key("UsageRecords")
if err := awsAwsjson11_serializeDocumentUsageRecordList(v.UsageRecords, ok); err != nil {
return err
}
}
return nil
}
func awsAwsjson11_serializeOpDocumentMeterUsageInput(v *MeterUsageInput, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.DryRun != nil {
ok := object.Key("DryRun")
ok.Boolean(*v.DryRun)
}
if v.ProductCode != nil {
ok := object.Key("ProductCode")
ok.String(*v.ProductCode)
}
if v.Timestamp != nil {
ok := object.Key("Timestamp")
ok.Double(smithytime.FormatEpochSeconds(*v.Timestamp))
}
if v.UsageAllocations != nil {
ok := object.Key("UsageAllocations")
if err := awsAwsjson11_serializeDocumentUsageAllocations(v.UsageAllocations, ok); err != nil {
return err
}
}
if v.UsageDimension != nil {
ok := object.Key("UsageDimension")
ok.String(*v.UsageDimension)
}
if v.UsageQuantity != nil {
ok := object.Key("UsageQuantity")
ok.Integer(*v.UsageQuantity)
}
return nil
}
func awsAwsjson11_serializeOpDocumentRegisterUsageInput(v *RegisterUsageInput, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.Nonce != nil {
ok := object.Key("Nonce")
ok.String(*v.Nonce)
}
if v.ProductCode != nil {
ok := object.Key("ProductCode")
ok.String(*v.ProductCode)
}
if v.PublicKeyVersion != nil {
ok := object.Key("PublicKeyVersion")
ok.Integer(*v.PublicKeyVersion)
}
return nil
}
func awsAwsjson11_serializeOpDocumentResolveCustomerInput(v *ResolveCustomerInput, value smithyjson.Value) error {
object := value.Object()
defer object.Close()
if v.RegistrationToken != nil {
ok := object.Key("RegistrationToken")
ok.String(*v.RegistrationToken)
}
return nil
}
test_code_style.py
"""
@brief test log(time=150s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import check_pep8, ExtTestCase
class TestCodeStyle(ExtTestCase):
"""Test style."""
def test_style_src(self):
thi = os.path.abspath(os.path.dirname(__file__))
src_ = os.path.normpath(os.path.join(thi, "..", "..", "src"))
check_pep8(src_, fLOG=fLOG,
pylint_ignore=('C0103', 'C1801', 'R0201', 'R1705', 'W0108', 'W0613',
'C0111', 'W0223', 'W0201', 'W0212', 'C0415', 'C0209'),
skip=["Parameters differ from overridden 'fit' method",
"Module 'numpy.random' has no 'RandomState' member",
"Instance of 'SkLearnParameters' has no '",
" in module 'sklearn.cluster._k_means'",
"Instance of 'Struct' has no '",
])
def test_style_test(self):
thi = os.path.abspath(os.path.dirname(__file__))
test = os.path.normpath(os.path.join(thi, "..", ))
check_pep8(test, fLOG=fLOG, neg_pattern="temp_.*",
pylint_ignore=('C0103', 'C1801', 'R0201', 'R1705', 'W0108', 'W0613',
'C0111', 'W0612', 'E0632', 'C0415', 'C0209'),
skip=["Instance of 'tuple' has no ",
])
if __name__ == "__main__":
unittest.main()
mgan_head.py
import torch.nn as nn
from ..registry import HEADS
from ..utils import ConvModule
from mmdetection.core import auto_fp16
@HEADS.register_module
class MGANHead(nn.Module):
def __init__(self,
num_convs=2,
roi_feat_size=7,
in_channels=512,
conv_out_channels=512,
conv_cfg=None,
norm_cfg=None):
super(MGANHead, self).__init__()
self.num_convs = num_convs
self.roi_feat_size = roi_feat_size
self.in_channels = in_channels
self.conv_out_channels = conv_out_channels
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
self.fp16_enabled = False
self.convs = nn.ModuleList()
for i in range(self.num_convs):
in_channels = (
self.in_channels if i == 0 else self.conv_out_channels)
self.convs.append(
ConvModule(
in_channels,
self.conv_out_channels,
3,
padding=1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg))
logits_in_channel = self.conv_out_channels
self.conv_logits = nn.Conv2d(logits_in_channel, 1, 1)
self.relu = nn.ReLU(inplace=True)
self.debug_imgs = None
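# forward runs the conv stack, then re-weights the resulting feature map by
# the sigmoid of a 1x1 conv output, acting as a learned spatial attention mask.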
@auto_fp16()
def forward(self, x):
for conv in self.convs:
x = conv(x)
x = self.conv_logits(x).sigmoid() * x
return x
index.ts
//================================================================
export * as billing from "./billing";
export * as internal from "./internal";
export * as payments from "./payments";
export * as virtual_accounts from "./virtual_accounts";
/**
* @packageDocumentation
* @module api.functional
*/
authorizeec2securitygroupingress_controller.go
/*
Copyright 2018 Jeff Nickoloff ([email protected]).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package authorizeec2securitygroupingress
import (
"context"
"encoding/json"
"fmt"
aws "github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
awssession "github.com/aws/aws-sdk-go/aws/session"
ec2 "github.com/aws/aws-sdk-go/service/ec2"
eccv1alpha1 "github.com/gotopple/kloudformation/pkg/apis/ecc/v1alpha1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/tools/record"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/controller"
"sigs.k8s.io/controller-runtime/pkg/handler"
"sigs.k8s.io/controller-runtime/pkg/manager"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
"sigs.k8s.io/controller-runtime/pkg/source"
)
// Add creates a new authorizeec2securitygroupingress Controller and adds it to the Manager with default RBAC. The Manager will set fields on the Controller
// and Start it when the Manager is Started.
func Add(mgr manager.Manager) error {
return add(mgr, newReconciler(mgr))
}
// newReconciler returns a new reconcile.Reconciler
func newReconciler(mgr manager.Manager) reconcile.Reconciler {
sess := awssession.Must(awssession.NewSessionWithOptions(awssession.Options{
SharedConfigState: awssession.SharedConfigEnable,
}))
r := mgr.GetRecorder(`authorizeec2securitygroupingress-controller`)
return &ReconcileAuthorizeEC2SecurityGroupIngress{Client: mgr.GetClient(), scheme: mgr.GetScheme(), sess: sess, events: r}
}
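// The AWS session is created once at startup and reused by every Reconcile call.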
// add adds a new Controller to mgr with r as the reconcile.Reconciler
func add(mgr manager.Manager, r reconcile.Reconciler) error {
// Create a new controller
c, err := controller.New("authorizeec2securitygroupingress-controller", mgr, controller.Options{Reconciler: r})
if err != nil {
return err
}
// Watch for changes to AuthorizeEC2SecurityGroupIngress
err = c.Watch(&source.Kind{Type: &eccv1alpha1.AuthorizeEC2SecurityGroupIngress{}}, &handler.EnqueueRequestForObject{})
if err != nil {
return err
}
return nil
}
var _ reconcile.Reconciler = &ReconcileAuthorizeEC2SecurityGroupIngress{}
// ReconcileAuthorizeEC2SecurityGroupIngress reconciles a AuthorizeEC2SecurityGroupIngress object
type ReconcileAuthorizeEC2SecurityGroupIngress struct {
client.Client
scheme *runtime.Scheme
sess *awssession.Session
events record.EventRecorder
}
// Reconcile reads that state of the cluster for a AuthorizeEC2SecurityGroupIngress object and makes changes based on the state read
// and what is in the AuthorizeEC2SecurityGroupIngress.Spec
// Automatically generate RBAC rules to allow the Controller to read and write Deployments
// +kubebuilder:rbac:groups=ecc.aws.gotopple.com,resources=authorizeec2securitygroupingress,verbs=get;list;watch;create;update;patch;delete
func (r *ReconcileAuthorizeEC2SecurityGroupIngress) Reconcile(request reconcile.Request) (reconcile.Result, error) {
// Fetch the AuthorizeEC2SecurityGroupIngress instance
instance := &eccv1alpha1.AuthorizeEC2SecurityGroupIngress{}
err := r.Get(context.TODO(), request.NamespacedName, instance)
if err != nil {
if errors.IsNotFound(err) {
// Object not found, return. Created objects are automatically garbage collected.
// For additional cleanup logic use finalizers.
return reconcile.Result{}, nil
}
// Error reading the object - requeue the request.
return reconcile.Result{}, err
}
svc := ec2.New(r.sess)
// get the AuthorizeEC2SecurityGroupIngressId out of the annotations
// if absent then create
ingressAuthorized, ok := instance.ObjectMeta.Annotations[`ingressAuthorized`]
if !ok {
// Need to lookup the security group to attach to
ec2SecurityGroup := &eccv1alpha1.EC2SecurityGroup{}
err = r.Get(context.TODO(), types.NamespacedName{Name: instance.Spec.EC2SecurityGroupName, Namespace: instance.Namespace}, ec2SecurityGroup)
if err != nil {
if errors.IsNotFound(err) {
r.events.Eventf(instance, `Warning`, `CreateFailure`, "Can't find EC2SecurityGroup")
return reconcile.Result{}, fmt.Errorf(`EC2SecurityGroup not ready`)
}
return reconcile.Result{}, err
} else if len(ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]) <= 0 {
r.events.Eventf(instance, `Warning`, `CreateFailure`, "EC2SecurityGroup has no ID annotation")
return reconcile.Result{}, fmt.Errorf(`EC2SecurityGroup not ready`)
}
r.events.Eventf(instance, `Normal`, `CreateAttempt`, "Creating AWS AuthorizeEC2SecurityGroupIngress in %s", *r.sess.Config.Region)
authorizeOutput, err := svc.AuthorizeSecurityGroupIngress(&ec2.AuthorizeSecurityGroupIngressInput{
CidrIp: aws.String(instance.Spec.SourceCidrIp),
FromPort: aws.Int64(instance.Spec.FromPort),
GroupId: aws.String(ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]),
IpProtocol: aws.String(instance.Spec.IpProtocol),
ToPort: aws.Int64(instance.Spec.ToPort),
})
if err != nil {
r.events.Eventf(instance, `Warning`, `AuthorizeFailure`, "Create failed: %s", err.Error())
return reconcile.Result{}, err
}
if authorizeOutput == nil {
return reconcile.Result{}, fmt.Errorf(`AuthorizeOutput was nil`)
}
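// Record the successful authorization on the instance and guard both the
// instance and the security group with finalizers.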
ingressAuthorized = "yes"
r.events.Eventf(instance, `Normal`, `Created`, "Created AWS AuthorizeEC2SecurityGroupIngress for EC2SecurityGroup (%s)", ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`])
instance.ObjectMeta.Annotations = make(map[string]string)
instance.ObjectMeta.Annotations[`ingressAuthorized`] = ingressAuthorized
instance.ObjectMeta.Finalizers = append(instance.ObjectMeta.Finalizers, `authorizeec2securitygroupingress.ecc.aws.gotopple.com`)
// check if security group already has a finalizer placed on it by this controller
ingressFinalizerPresent := false
for _, i := range ec2SecurityGroup.ObjectMeta.Finalizers {
if i == `authorizeec2securitygroupingress.ecc.aws.gotopple.com` {
ingressFinalizerPresent = true
}
}
// If there isn't a finalizer already on the security group, place it and create an empty list for ingress rules
if !ingressFinalizerPresent {
ec2SecurityGroup.ObjectMeta.Finalizers = append(ec2SecurityGroup.ObjectMeta.Finalizers, `authorizeec2securitygroupingress.ecc.aws.gotopple.com`)
ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`] = `[]`
}
//add the rule being created to the security groups annotations
ruleList := []string{}
err = json.Unmarshal([]byte(ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`]), &ruleList)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, `Failed to parse ingress rules`)
}
ruleList = append(ruleList, instance.Name)
newAnnotation, err := json.Marshal(ruleList)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, `Failed to update ingress rules`)
}
ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`] = string(newAnnotation)
r.events.Event(instance, `Normal`, `Annotated`, "Added finalizer to Security Group")
r.events.Event(ec2SecurityGroup, `Normal`, `Annotated`, "Added finalizer to Security Group")
//update the Security Group, now that it's done.
err = r.Update(context.TODO(), ec2SecurityGroup)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, `Couldn't update Security Group annotations: %s`, err.Error())
r.events.Eventf(ec2SecurityGroup, `Warning`, `ResourceUpdateFailure`, `Couldn't update Security Group annotations: %s`, err.Error())
}
err = r.Update(context.TODO(), instance)
if err != nil {
// If the call to update the resource annotations has failed then
// the AuthorizeEC2SecurityGroupIngress resource will not be able to track the created AuthorizeEC2SecurityGroupIngress and
// no finalizer will have been appended.
//
// This routine should attempt to delete the AWS AuthorizeEC2SecurityGroupIngress before
// returning the error and retrying.
r.events.Eventf(instance,
`Warning`,
`ResourceUpdateFailure`,
"Failed to update the resource: %s", err.Error())
revokeSecurityGroupIngressOutput, ierr := svc.RevokeSecurityGroupIngress(&ec2.RevokeSecurityGroupIngressInput{
CidrIp: aws.String(instance.Spec.SourceCidrIp),
FromPort: aws.Int64(instance.Spec.FromPort),
GroupId: aws.String(ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]),
IpProtocol: aws.String(instance.Spec.IpProtocol),
ToPort: aws.Int64(instance.Spec.ToPort),
})
if ierr != nil {
// Send an appropriate event that has been annotated
// for async AWS resource GC.
r.events.AnnotatedEventf(instance,
map[string]string{`cleanupIngressRule`: ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]},
`Warning`,
`DeleteFailure`,
"Unable to delete (revoke) the AuthorizeEC2SecurityGroupIngress: %s", ierr.Error())
if aerr, ok := ierr.(awserr.Error); ok {
switch aerr.Code() {
default:
fmt.Println(aerr.Error())
}
} else {
// Print the error, cast err to awserr.Error to get the Code and
// Message from an error.
fmt.Println(ierr.Error())
}
} else if revokeSecurityGroupIngressOutput == nil {
// Send an appropriate event that has been annotated
// for async AWS resource GC.
r.events.AnnotatedEventf(instance,
map[string]string{`cleanupIngressRule`: ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]},
`Warning`,
`DeleteAmbiguity`,
"Attempt to delete the AuthorizeEC2SecurityGroupIngress recieved a nil response")
return reconcile.Result{}, fmt.Errorf(`RevokeSecurityGroupIngressOutput was nil`)
}
return reconcile.Result{}, err
}
r.events.Event(instance, `Normal`, `Annotated`, "Added finalizer and annotations")
} else if instance.ObjectMeta.DeletionTimestamp != nil {
// Need to lookup the security group to attach to
ec2SecurityGroupFound := true
ec2SecurityGroup := &eccv1alpha1.EC2SecurityGroup{}
err = r.Get(context.TODO(), types.NamespacedName{Name: instance.Spec.EC2SecurityGroupName, Namespace: instance.Namespace}, ec2SecurityGroup)
if err != nil {
if errors.IsNotFound(err) {
r.events.Eventf(instance, `Warning`, `CreateFailure`, "Can't find EC2SecurityGroup- Will attempt to delete anyway")
ec2SecurityGroupFound = false
}
} else if len(ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]) <= 0 {
r.events.Eventf(instance, `Warning`, `CreateFailure`, "EC2SecurityGroup has no ID annotation")
return reconcile.Result{}, fmt.Errorf(`EC2SecurityGroup not ready`)
}
// check for other Finalizers
for _, f := range instance.ObjectMeta.Finalizers {
if f != `authorizeec2securitygroupingress.ecc.aws.gotopple.com` {
r.events.Eventf(instance, `Warning`, `DeleteFailure`, "Unable to delete the AuthorizeEC2SecurityGroupIngress with remaining finalizers")
return reconcile.Result{}, fmt.Errorf(`Unable to delete the AuthorizeEC2SecurityGroupIngress with remaining finalizers`)
}
}
// must delete
if ec2SecurityGroupFound {
_, err = svc.RevokeSecurityGroupIngress(&ec2.RevokeSecurityGroupIngressInput{
CidrIp: aws.String(instance.Spec.SourceCidrIp),
FromPort: aws.Int64(instance.Spec.FromPort),
GroupId: aws.String(ec2SecurityGroup.ObjectMeta.Annotations[`ec2SecurityGroupId`]),
IpProtocol: aws.String(instance.Spec.IpProtocol),
ToPort: aws.Int64(instance.Spec.ToPort),
})
if err != nil {
r.events.Eventf(instance, `Warning`, `DeleteFailure`, "Unable to delete the AuthorizeEC2SecurityGroupIngress: %s", err.Error())
// Print the error, cast err to awserr.Error to get the Code and
// Message from an error.
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
case `InvalidPermission.NotFound`:
// we want to keep going
r.events.Eventf(instance, `Normal`, `AlreadyDeleted`, "The AuthorizeEC2SecurityGroupIngress: %s was already deleted", err.Error())
default:
return reconcile.Result{}, err
}
} else {
return reconcile.Result{}, err
}
}
//remove the rule name from security group annotations
ruleList := []string{}
err = json.Unmarshal([]byte(ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`]), &ruleList)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, `Failed to parse ingress rules`)
}
for i, f := range ruleList {
if f == instance.Name {
ruleList = append(ruleList[:i], ruleList[i+1:]...)
}
}
newAnnotation, err := json.Marshal(ruleList)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, `Failed to update ingress rules`)
}
ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`] = string(newAnnotation)
//check if any rules remain
if ec2SecurityGroup.ObjectMeta.Annotations[`ingressRules`] == `[]` {
for i, f := range ec2SecurityGroup.ObjectMeta.Finalizers {
if f == `authorizeec2securitygroupingress.ecc.aws.gotopple.com` {
ec2SecurityGroup.ObjectMeta.Finalizers = append(
ec2SecurityGroup.ObjectMeta.Finalizers[:i],
ec2SecurityGroup.ObjectMeta.Finalizers[i+1:]...)
}
}
}
}
// remove the finalizer
for i, f := range instance.ObjectMeta.Finalizers {
if f == `authorizeec2securitygroupingress.ecc.aws.gotopple.com` {
instance.ObjectMeta.Finalizers = append(
instance.ObjectMeta.Finalizers[:i],
instance.ObjectMeta.Finalizers[i+1:]...)
}
}
if ec2SecurityGroupFound {
err = r.Update(context.TODO(), ec2SecurityGroup)
if err != nil {
r.events.Eventf(ec2SecurityGroup, `Warning`, `ResourceUpdateFailure`, "Unable to remove finalizer: %s", err.Error())
return reconcile.Result{}, err
}
r.events.Eventf(ec2SecurityGroup, `Normal`, `Deleted`, "Deleted finalizer: %s", `authorizeec2securitygroupingress.ecc.aws.gotopple.com`)
}
// after a successful delete update the resource with the removed finalizer
err = r.Update(context.TODO(), instance)
if err != nil {
r.events.Eventf(instance, `Warning`, `ResourceUpdateFailure`, "Unable to remove finalizer: %s", err.Error())
return reconcile.Result{}, err
}
r.events.Event(instance, `Normal`, `Deleted`, "Deleted AuthorizeEC2SecurityGroupIngress and removed finalizers")
}
return reconcile.Result{}, nil
}
link.rs
use super::FileAction;
use crate::manifests::Manifest;
use crate::steps::Step;
use crate::{actions::Action, contexts::Contexts};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use tracing::error;
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct FileLink {
pub from: Option<String>,
pub source: Option<String>,
pub target: Option<String>,
pub to: Option<String>,
}
impl FileLink {
fn source(&self) -> String {
if self.source.is_none() && self.from.is_none() {
error!("Field 'source' is required for file.link");
}
if let Some(ref source) = self.source {
source.to_string()
} else {
self.from.clone().unwrap()
}
}
fn target(&self) -> String {
if self.target.is_none() && self.to.is_none() {
error!("Field 'target' is required for file.link");
}
if let Some(ref target) = self.target {
target.to_string()
} else {
self.to.clone().unwrap()
}
}
}
impl FileAction for FileLink {}
impl Action for FileLink {
fn plan(&self, manifest: &Manifest, _: &Contexts) -> Vec<Step> {
use crate::atoms::directory::Create as DirCreate;
use crate::atoms::file::Link;
let from: PathBuf = match self.resolve(manifest, self.source().as_str()) {
Ok(from) => from,
Err(_) => {
error!("Failed to resolve path for file link");
return vec![];
}
};
let to = PathBuf::from(self.target());
let parent = to.clone();
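// The plan is two steps: ensure the link target's parent directory exists,
// then create the link itself.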
vec![
Step {
atom: Box::new(DirCreate {
path: parent.parent().unwrap().into(),
}),
initializers: vec![],
finalizers: vec![],
},
Step {
atom: Box::new(Link {
source: from,
target: to,
}),
initializers: vec![],
finalizers: vec![],
},
]
}
}
#[cfg(test)]
mod tests {
use crate::actions::Actions;
#[test]
fn it_can_be_deserialized() {
let yaml = r#"
- action: file.link
source: a
target: b
"#;
let mut actions: Vec<Actions> = serde_yaml::from_str(yaml).unwrap();
match actions.pop() {
Some(Actions::FileLink(action)) => {
assert_eq!("a", action.action.source());
assert_eq!("b", action.action.target());
}
_ => {
panic!("FileLink didn't deserialize to the correct type");
}
};
// Old style format
let yaml = r#"
- action: file.link
from: a
to: b
"#;
let mut actions: Vec<Actions> = serde_yaml::from_str(yaml).unwrap();
match actions.pop() {
Some(Actions::FileLink(action)) => {
assert_eq!("a", action.action.source());
assert_eq!("b", action.action.target());
}
_ => {
panic!("FileLink didn't deserialize to the correct type");
}
};
}
}
build.rs
extern crate bindgen;
extern crate cc;
extern crate num_cpus;
extern crate pkg_config;
extern crate regex;
use std::env;
use std::fs::{self, create_dir, symlink_metadata, File};
use std::io::{self, BufRead, BufReader, Write};
use std::path::PathBuf;
use std::process::Command;
use std::str;
use bindgen::callbacks::{IntKind, ParseCallbacks};
use regex::Regex;
#[derive(Debug)]
struct Library {
name: &'static str,
is_feature: bool,
}
impl Library {
fn feature_name(&self) -> Option<String> {
if self.is_feature {
Some("CARGO_FEATURE_".to_string() + &self.name.to_uppercase())
} else {
None
}
}
}
static LIBRARIES: &[Library] = &[
Library {
name: "avcodec",
is_feature: true,
},
Library {
name: "avdevice",
is_feature: true,
},
Library {
name: "avfilter",
is_feature: true,
},
Library {
name: "avformat",
is_feature: true,
},
Library {
name: "avresample",
is_feature: true,
},
Library {
name: "avutil",
is_feature: false,
},
Library {
name: "postproc",
is_feature: true,
},
Library {
name: "swresample",
is_feature: true,
},
Library {
name: "swscale",
is_feature: true,
},
];
#[derive(Debug)]
struct IntCallbacks;
impl ParseCallbacks for IntCallbacks {
fn int_macro(&self, _name: &str, value: i64) -> Option<IntKind> {
let ch_layout = Regex::new(r"^AV_CH").unwrap();
let codec_cap = Regex::new(r"^AV_CODEC_CAP").unwrap();
let codec_flag = Regex::new(r"^AV_CODEC_FLAG").unwrap();
let error_max_size = Regex::new(r"^AV_ERROR_MAX_STRING_SIZE").unwrap();
if value >= i64::min_value() as i64
&& value <= i64::max_value() as i64
&& ch_layout.is_match(_name)
{
Some(IntKind::ULongLong)
} else if value >= i32::min_value() as i64
&& value <= i32::max_value() as i64
&& (codec_cap.is_match(_name) || codec_flag.is_match(_name))
{
Some(IntKind::UInt)
} else if error_max_size.is_match(_name) {
Some(IntKind::Custom {
name: "usize",
is_signed: false,
})
} else if value >= i32::min_value() as i64 && value <= i32::max_value() as i64 {
Some(IntKind::Int)
} else {
None
}
}
fn will_parse_macro(&self, name: &str) -> bindgen::callbacks::MacroParsingBehavior {
match name {
"FP_INFINITE" | "FP_NAN" | "FP_NORMAL" | "FP_SUBNORMAL" | "FP_ZERO" => {
bindgen::callbacks::MacroParsingBehavior::Ignore
}
_ => bindgen::callbacks::MacroParsingBehavior::Default,
}
}
}
fn version() -> String {
let major: u8 = env::var("CARGO_PKG_VERSION_MAJOR")
.unwrap()
.parse()
.unwrap();
let minor: u8 = env::var("CARGO_PKG_VERSION_MINOR")
.unwrap()
.parse()
.unwrap();
format!("{}.{}", major, minor)
}
fn output() -> PathBuf {
PathBuf::from(env::var("OUT_DIR").unwrap())
}
fn source() -> PathBuf {
output().join(format!("ffmpeg-{}", version()))
}
fn search() -> PathBuf {
let mut absolute = env::current_dir().unwrap();
absolute.push(&output());
absolute.push("dist");
absolute
}
fn fetch() -> io::Result<()> {
//If the directory is already there...
if output().join(format!("ffmpeg-{}", version())).exists() {
return Ok(());
}
let status = Command::new("git")
.current_dir(&output())
.arg("clone")
.arg("-b")
.arg(format!("release/{}", version()))
.arg("https://github.com/FFmpeg/FFmpeg")
.arg(format!("ffmpeg-{}", version()))
.status()?;
if status.success() {
Ok(())
} else {
Err(io::Error::new(io::ErrorKind::Other, "fetch failed"))
}
}
fn switch(configure: &mut Command, feature: &str, name: &str) {
let arg = if env::var("CARGO_FEATURE_".to_string() + feature).is_ok() {
"--enable-"
} else {
"--disable-"
};
configure.arg(arg.to_string() + name);
}
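// For example, switch(&mut configure, "AVDEVICE", "avdevice") passes
// --enable-avdevice when the `avdevice` cargo feature is set, and
// --disable-avdevice otherwise.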
fn build() -> io::Result<()> {
let mut configure = Command::new("./configure");
configure.current_dir(&source());
configure.arg(format!("--prefix={}", search().to_string_lossy()));
if env::var("TARGET").unwrap() != env::var("HOST").unwrap() {
configure.arg(format!("--cross-prefix={}-", env::var("TARGET").unwrap()));
}
// control debug build
if env::var("DEBUG").is_ok() {
configure.arg("--enable-debug");
configure.arg("--disable-stripping");
} else {
configure.arg("--disable-debug");
configure.arg("--enable-stripping");
}
// make it static
configure.arg("--enable-static");
configure.arg("--disable-shared");
configure.arg("--disable-zlib");
configure.arg("--disable-bzlib");
configure.arg("--enable-pic");
// do not build programs since we don't need them
configure.arg("--disable-programs");
macro_rules! enable {
($conf:expr, $feat:expr, $name:expr) => {
if env::var(concat!("CARGO_FEATURE_", $feat)).is_ok() {
$conf.arg(concat!("--enable-", $name));
}
};
}
// macro_rules! disable {
// ($conf:expr, $feat:expr, $name:expr) => (
// if env::var(concat!("CARGO_FEATURE_", $feat)).is_err() {
// $conf.arg(concat!("--disable-", $name));
// }
// )
// }
// the binary using ffmpeg-sys must comply with GPL
switch(&mut configure, "BUILD_LICENSE_GPL", "gpl");
// the binary using ffmpeg-sys must comply with (L)GPLv3
switch(&mut configure, "BUILD_LICENSE_VERSION3", "version3");
// the binary using ffmpeg-sys cannot be redistributed
switch(&mut configure, "BUILD_LICENSE_NONFREE", "nonfree");
// configure building libraries based on features
for lib in LIBRARIES.iter().filter(|lib| lib.is_feature) {
switch(&mut configure, &lib.name.to_uppercase(), lib.name);
}
// configure external SSL libraries
enable!(configure, "BUILD_LIB_GNUTLS", "gnutls");
enable!(configure, "BUILD_LIB_OPENSSL", "openssl");
// configure external filters
enable!(configure, "BUILD_LIB_FONTCONFIG", "fontconfig");
enable!(configure, "BUILD_LIB_FREI0R", "frei0r");
enable!(configure, "BUILD_LIB_LADSPA", "ladspa");
enable!(configure, "BUILD_LIB_ASS", "libass");
enable!(configure, "BUILD_LIB_FREETYPE", "libfreetype");
enable!(configure, "BUILD_LIB_FRIBIDI", "libfribidi");
enable!(configure, "BUILD_LIB_OPENCV", "libopencv");
// configure external encoders/decoders
enable!(configure, "BUILD_LIB_AACPLUS", "libaacplus");
enable!(configure, "BUILD_LIB_CELT", "libcelt");
enable!(configure, "BUILD_LIB_DCADEC", "libdcadec");
enable!(configure, "BUILD_LIB_FAAC", "libfaac");
enable!(configure, "BUILD_LIB_FDK_AAC", "libfdk-aac");
enable!(configure, "BUILD_LIB_GSM", "libgsm");
enable!(configure, "BUILD_LIB_ILBC", "libilbc");
enable!(configure, "BUILD_LIB_VAZAAR", "libvazaar");
enable!(configure, "BUILD_LIB_MP3LAME", "libmp3lame");
enable!(configure, "BUILD_LIB_OPENCORE_AMRNB", "libopencore-amrnb");
enable!(configure, "BUILD_LIB_OPENCORE_AMRWB", "libopencore-amrwb");
enable!(configure, "BUILD_LIB_OPENH264", "libopenh264");
enable!(configure, "BUILD_LIB_OPENH265", "libopenh265");
enable!(configure, "BUILD_LIB_OPENJPEG", "libopenjpeg");
enable!(configure, "BUILD_LIB_OPUS", "libopus");
enable!(configure, "BUILD_LIB_SCHROEDINGER", "libschroedinger");
enable!(configure, "BUILD_LIB_SHINE", "libshine");
enable!(configure, "BUILD_LIB_SNAPPY", "libsnappy");
enable!(configure, "BUILD_LIB_SPEEX", "libspeex");
enable!(
configure,
"BUILD_LIB_STAGEFRIGHT_H264",
"libstagefright-h264"
);
enable!(configure, "BUILD_LIB_THEORA", "libtheora");
enable!(configure, "BUILD_LIB_TWOLAME", "libtwolame");
enable!(configure, "BUILD_LIB_UTVIDEO", "libutvideo");
enable!(configure, "BUILD_LIB_VO_AACENC", "libvo-aacenc");
enable!(configure, "BUILD_LIB_VO_AMRWBENC", "libvo-amrwbenc");
enable!(configure, "BUILD_LIB_VORBIS", "libvorbis");
enable!(configure, "BUILD_LIB_VPX", "libvpx");
enable!(configure, "BUILD_LIB_WAVPACK", "libwavpack");
enable!(configure, "BUILD_LIB_WEBP", "libwebp");
enable!(configure, "BUILD_LIB_X264", "libx264");
enable!(configure, "BUILD_LIB_X265", "libx265");
enable!(configure, "BUILD_LIB_AVS", "libavs");
enable!(configure, "BUILD_LIB_XVID", "libxvid");
// other external libraries
enable!(configure, "BUILD_NVENC", "nvenc");
// configure external protocols
enable!(configure, "BUILD_LIB_SMBCLIENT", "libsmbclient");
enable!(configure, "BUILD_LIB_SSH", "libssh");
// configure misc build options
enable!(configure, "BUILD_PIC", "pic");
// run ./configure
let output = configure
.output()
.expect(&format!("{:?} failed", configure));
if !output.status.success() {
println!("configure: {}", String::from_utf8_lossy(&output.stdout));
return Err(io::Error::new(
io::ErrorKind::Other,
format!(
"configure failed {}",
String::from_utf8_lossy(&output.stderr)
),
));
}
// run make
if !Command::new("make")
.arg("-j")
.arg(num_cpus::get().to_string())
.current_dir(&source())
.status()?
.success()
{
return Err(io::Error::new(io::ErrorKind::Other, "make failed"));
}
// run make install
if !Command::new("make")
.current_dir(&source())
.arg("install")
.status()?
.success()
{
return Err(io::Error::new(io::ErrorKind::Other, "make install failed"));
}
Ok(())
}
fn check_features(
include_paths: Vec<PathBuf>,
infos: &Vec<(&'static str, Option<&'static str>, &'static str)>,
) {
let mut includes_code = String::new();
let mut main_code = String::new();
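// Generate a small C program that prints a "[VAR]<value><is_defined>" marker
// for each macro of interest; its stdout is parsed below to emit cargo cfg
// flags and version gates.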
for &(header, feature, var) in infos {
if let Some(feature) = feature {
if env::var(format!("CARGO_FEATURE_{}", feature.to_uppercase())).is_err() {
continue;
}
}
let include = format!("#include <{}>", header);
if includes_code.find(&include).is_none() {
includes_code.push_str(&include);
includes_code.push_str(&"\n");
}
includes_code.push_str(&format!(
r#"
#ifndef {var}
#define {var} 0
#define {var}_is_defined 0
#else
#define {var}_is_defined 1
#endif
"#,
var = var
));
main_code.push_str(&format!(
r#"printf("[{var}]%d%d\n", {var}, {var}_is_defined);"#,
var = var
));
}
let version_check_info = [("avcodec", 56, 60, 0, 80)];
for &(lib, begin_version_major, end_version_major, begin_version_minor, end_version_minor) in
version_check_info.iter()
{
for version_major in begin_version_major..end_version_major {
for version_minor in begin_version_minor..end_version_minor {
main_code.push_str(&format!(
r#"printf("[{lib}_version_greater_than_{version_major}_{version_minor}]%d\n", LIB{lib_uppercase}_VERSION_MAJOR > {version_major} || (LIB{lib_uppercase}_VERSION_MAJOR == {version_major} && LIB{lib_uppercase}_VERSION_MINOR > {version_minor}));"#,
lib = lib,
lib_uppercase = lib.to_uppercase(),
version_major = version_major,
version_minor = version_minor
));
}
}
}
let out_dir = output();
write!(
File::create(out_dir.join("check.c")).expect("Failed to create file"),
r#"
#include <stdio.h>
{includes_code}
int main()
{{
{main_code}
return 0;
}}
"#,
includes_code = includes_code,
main_code = main_code
)
.expect("Write failed");
let executable = out_dir.join(if cfg!(windows) { "check.exe" } else { "check" });
let mut compiler = cc::Build::new().get_compiler().to_command();
for dir in include_paths {
compiler.arg("-I");
compiler.arg(dir.to_string_lossy().into_owned());
}
if !compiler
.current_dir(&out_dir)
.arg("-o")
.arg(&executable)
.arg("check.c")
.status()
.expect("Command failed")
.success()
{
panic!("Compile failed");
}
let stdout_raw = Command::new(out_dir.join(&executable))
.current_dir(&out_dir)
.output()
.expect("Check failed")
.stdout;
let stdout = str::from_utf8(stdout_raw.as_slice()).unwrap();
println!("stdout={}", stdout);
for &(_, feature, var) in infos {
if let Some(feature) = feature {
if env::var(format!("CARGO_FEATURE_{}", feature.to_uppercase())).is_err() {
continue;
}
}
let var_str = format!("[{var}]", var = var);
let pos = stdout.find(&var_str).expect("Variable not found in output") + var_str.len();
if &stdout[pos..pos + 1] == "1" {
println!(r#"cargo:rustc-cfg=feature="{}""#, var.to_lowercase());
println!(r#"cargo:{}=true"#, var.to_lowercase());
}
// Also find out if defined or not (useful for cases where only the definition of a macro
// can be used as distinction)
if &stdout[pos + 1..pos + 2] == "1" {
println!(
r#"cargo:rustc-cfg=feature="{}_is_defined""#,
var.to_lowercase()
);
println!(r#"cargo:{}_is_defined=true"#, var.to_lowercase());
}
}
for &(lib, begin_version_major, end_version_major, begin_version_minor, end_version_minor) in
version_check_info.iter()
{
for version_major in begin_version_major..end_version_major {
for version_minor in begin_version_minor..end_version_minor {
let search_str = format!(
"[{lib}_version_greater_than_{version_major}_{version_minor}]",
version_major = version_major,
version_minor = version_minor,
lib = lib
);
let pos = stdout
.find(&search_str)
.expect("Variable not found in output")
+ search_str.len();
if &stdout[pos..pos + 1] == "1" {
println!(
r#"cargo:rustc-cfg=feature="{}""#,
&search_str[1..(search_str.len() - 1)]
);
}
}
}
}
}
fn search_include(include_paths: &Vec<PathBuf>, header: &str) -> String {
for dir in include_paths {
let include = dir.join(header);
if fs::metadata(&include).is_ok() {
return format!("{}", include.as_path().to_str().unwrap());
}
}
format!("/usr/include/{}", header)
}
fn link_to_libraries(statik: bool) {
let ffmpeg_ty = if statik { "static" } else { "dylib" };
for lib in LIBRARIES {
let feat_is_enabled = lib.feature_name().and_then(|f| env::var(&f).ok()).is_some();
if !lib.is_feature || feat_is_enabled {
println!("cargo:rustc-link-lib={}={}", ffmpeg_ty, lib.name);
}
}
if env::var("CARGO_FEATURE_BUILD_ZLIB").is_ok() && cfg!(target_os = "linux") {
println!("cargo:rustc-link-lib=z");
}
}
fn main() {
let statik = env::var("CARGO_FEATURE_STATIC").is_ok();
let include_paths: Vec<PathBuf> = if env::var("CARGO_FEATURE_BUILD").is_ok() {
println!(
"cargo:rustc-link-search=native={}",
search().join("lib").to_string_lossy()
);
link_to_libraries(statik);
if fs::metadata(&search().join("lib").join("libavutil.a")).is_err() {
fs::create_dir_all(&output())
.ok()
.expect("failed to create build directory");
fetch().unwrap();
build().unwrap();
}
// Check additional required libraries.
{
let config_mak = source().join("ffbuild/config.mak");
let file = File::open(config_mak).unwrap();
let reader = BufReader::new(file);
let extra_libs = reader
.lines()
.find(|ref line| line.as_ref().unwrap().starts_with("EXTRALIBS"))
.map(|line| line.unwrap())
.unwrap();
let linker_args = extra_libs.split('=').last().unwrap().split(' ');
let include_libs = linker_args
.filter(|v| v.starts_with("-l"))
.map(|flag| &flag[2..]);
for lib in include_libs {
println!("cargo:rustc-link-lib={}", lib);
}
}
vec![search().join("include")]
}
// Use prebuilt library
else if let Ok(ffmpeg_dir) = env::var("FFMPEG_DIR") {
let ffmpeg_dir = PathBuf::from(ffmpeg_dir);
println!(
"cargo:rustc-link-search=native={}",
ffmpeg_dir.join("lib").to_string_lossy()
);
link_to_libraries(statik);
vec![ffmpeg_dir.join("include")]
}
// Fallback to pkg-config
else {
pkg_config::Config::new()
.statik(statik)
.probe("libavutil")
.unwrap()
.include_paths;
let libs = vec![
("libavformat", "AVFORMAT"),
("libavfilter", "AVFILTER"),
("libavdevice", "AVDEVICE"),
("libavresample", "AVRESAMPLE"),
("libswscale", "SWSCALE"),
("libswresample", "SWRESAMPLE"),
];
for (lib_name, env_variable_name) in libs.iter() {
if env::var(format!("CARGO_FEATURE_{}", env_variable_name)).is_ok() {
pkg_config::Config::new()
.statik(statik)
.probe(lib_name)
.unwrap()
.include_paths;
}
}
pkg_config::Config::new()
.statik(statik)
.probe("libavcodec")
.unwrap()
.include_paths
};
if statik && cfg!(target_os = "macos") {
let frameworks = vec![
"AppKit",
"AudioToolbox",
"AVFoundation",
"CoreFoundation",
"CoreGraphics",
"CoreMedia",
"CoreServices",
"CoreVideo",
"Foundation",
"OpenCL",
"OpenGL",
"QTKit",
"QuartzCore",
"Security",
"VideoDecodeAcceleration",
"VideoToolbox",
];
for f in frameworks {
println!("cargo:rustc-link-lib=framework={}", f);
}
}
check_features(
include_paths.clone(),
&vec![
("libavutil/avutil.h", None, "FF_API_OLD_AVOPTIONS"),
("libavutil/avutil.h", None, "FF_API_PIX_FMT"),
("libavutil/avutil.h", None, "FF_API_CONTEXT_SIZE"),
("libavutil/avutil.h", None, "FF_API_PIX_FMT_DESC"),
("libavutil/avutil.h", None, "FF_API_AV_REVERSE"),
("libavutil/avutil.h", None, "FF_API_AUDIOCONVERT"),
("libavutil/avutil.h", None, "FF_API_CPU_FLAG_MMX2"),
("libavutil/avutil.h", None, "FF_API_LLS_PRIVATE"),
("libavutil/avutil.h", None, "FF_API_AVFRAME_LAVC"),
("libavutil/avutil.h", None, "FF_API_VDPAU"),
(
"libavutil/avutil.h",
None,
"FF_API_GET_CHANNEL_LAYOUT_COMPAT",
),
("libavutil/avutil.h", None, "FF_API_XVMC"),
("libavutil/avutil.h", None, "FF_API_OPT_TYPE_METADATA"),
("libavutil/avutil.h", None, "FF_API_DLOG"),
("libavutil/avutil.h", None, "FF_API_HMAC"),
("libavutil/avutil.h", None, "FF_API_VAAPI"),
("libavutil/avutil.h", None, "FF_API_PKT_PTS"),
("libavutil/avutil.h", None, "FF_API_ERROR_FRAME"),
("libavutil/avutil.h", None, "FF_API_FRAME_QP"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_VIMA_DECODER",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_REQUEST_CHANNELS",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_OLD_DECODE_AUDIO",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_OLD_ENCODE_AUDIO",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_OLD_ENCODE_VIDEO",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_CODEC_ID"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_AUDIO_CONVERT",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_AVCODEC_RESAMPLE",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_DEINTERLACE",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_DESTRUCT_PACKET",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_GET_BUFFER"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_MISSING_SAMPLE",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_LOWRES"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_CAP_VDPAU"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_BUFS_VDPAU"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_VOXWARE"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_SET_DIMENSIONS",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_DEBUG_MV"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_AC_VLC"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_OLD_MSMPEG4",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_ASPECT_EXTENDED",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_THREAD_OPAQUE",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_CODEC_PKT"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_ARCH_ALPHA"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_XVMC"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_ERROR_RATE"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_QSCALE_TYPE",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_MB_TYPE"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_MAX_BFRAMES",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_NEG_LINESIZES",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_EMU_EDGE"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_ARCH_SH4"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_ARCH_SPARC"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_UNUSED_MEMBERS",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_IDCT_XVIDMMX",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_INPUT_PRESERVED",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_NORMALIZE_AQP",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_GMC"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_MV0"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_CODEC_NAME"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_AFD"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_VISMV"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_DV_FRAME_PROFILE",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_AUDIOENC_DELAY",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_VAAPI_CONTEXT",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_AVCTX_TIMEBASE",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_MPV_OPT"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_STREAM_CODEC_TAG",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_QUANT_BIAS"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_RC_STRATEGY",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_CODED_FRAME",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_MOTION_EST"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_WITHOUT_PREFIX",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_CONVERGENCE_DURATION",
),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_PRIVATE_OPT",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_CODER_TYPE"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_RTP_CALLBACK",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_STAT_BITS"),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_VBV_DELAY"),
(
"libavcodec/avcodec.h",
Some("avcodec"),
"FF_API_SIDEDATA_ONLY_PKT",
),
("libavcodec/avcodec.h", Some("avcodec"), "FF_API_AVPICTURE"),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_LAVF_BITEXACT",
),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_LAVF_FRAC",
),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_URL_FEOF",
),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_PROBESIZE_32",
),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_LAVF_AVCTX",
),
(
"libavformat/avformat.h",
Some("avformat"),
"FF_API_OLD_OPEN_CALLBACKS",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_AVFILTERPAD_PUBLIC",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_FOO_COUNT",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_OLD_FILTER_OPTS",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_OLD_FILTER_OPTS_ERROR",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_AVFILTER_OPEN",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_OLD_FILTER_REGISTER",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_OLD_GRAPH_PARSE",
),
(
"libavfilter/avfilter.h",
Some("avfilter"),
"FF_API_NOCONST_GET_NAME",
),
(
"libavresample/avresample.h",
Some("avresample"),
"FF_API_RESAMPLE_CLOSE_OPEN",
),
(
"libswscale/swscale.h",
Some("swscale"),
"FF_API_SWS_CPU_CAPS",
),
("libswscale/swscale.h", Some("swscale"), "FF_API_ARCH_BFIN"),
],
);
let tmp = std::env::current_dir().unwrap().join("tmp");
if symlink_metadata(&tmp).is_err() {
create_dir(&tmp).expect("Failed to create temporary output dir");
}
let mut f = File::create(tmp.join(".build")).expect("Failed to create .build");
let tool = cc::Build::new().get_compiler();
write!(f, "{}", tool.path().to_string_lossy().into_owned()).expect("failed to write cmd");
for arg in tool.args() {
write!(f, " {}", arg.to_str().unwrap()).expect("failed to write arg");
}
for dir in &include_paths {
write!(f, " -I {}", dir.to_string_lossy().into_owned()).expect("failed to write incdir");
}
let clang_includes = include_paths
.iter()
.map(|include| format!("-I{}", include.to_string_lossy()));
// The bindgen::Builder is the main entry point
// to bindgen, and lets you build up options for
// the resulting bindings.
let mut builder = bindgen::Builder::default()
.clang_args(clang_includes)
.ctypes_prefix("libc")
// https://github.com/servo/rust-bindgen/issues/687
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
// https://github.com/servo/rust-bindgen/issues/550
.blacklist_type("max_align_t")
.rustified_enum("*")
.prepend_enum_name(false)
.derive_eq(true)
.parse_callbacks(Box::new(IntCallbacks));
// The input headers we would like to generate
// bindings for.
if env::var("CARGO_FEATURE_AVCODEC").is_ok() {
builder = builder
.header(search_include(&include_paths, "libavcodec/avcodec.h"))
.header(search_include(&include_paths, "libavcodec/dv_profile.h"))
.header(search_include(&include_paths, "libavcodec/avfft.h"))
.header(search_include(&include_paths, "libavcodec/vaapi.h"))
.header(search_include(&include_paths, "libavcodec/vorbis_parser.h"));
}
if env::var("CARGO_FEATURE_AVDEVICE").is_ok() {
builder = builder.header(search_include(&include_paths, "libavdevice/avdevice.h"));
}
if env::var("CARGO_FEATURE_AVFILTER").is_ok() {
builder = builder
.header(search_include(&include_paths, "libavfilter/buffersink.h"))
.header(search_include(&include_paths, "libavfilter/buffersrc.h"))
.header(search_include(&include_paths, "libavfilter/avfilter.h"));
}
if env::var("CARGO_FEATURE_AVFORMAT").is_ok() {
builder = builder
.header(search_include(&include_paths, "libavformat/avformat.h"))
.header(search_include(&include_paths, "libavformat/avio.h"));
}
if env::var("CARGO_FEATURE_AVRESAMPLE").is_ok() {
builder = builder.header(search_include(&include_paths, "libavresample/avresample.h"));
}
builder = builder
.header(search_include(&include_paths, "libavutil/adler32.h"))
.header(search_include(&include_paths, "libavutil/aes.h"))
.header(search_include(&include_paths, "libavutil/audio_fifo.h"))
.header(search_include(&include_paths, "libavutil/base64.h"))
.header(search_include(&include_paths, "libavutil/blowfish.h"))
.header(search_include(&include_paths, "libavutil/bprint.h"))
.header(search_include(&include_paths, "libavutil/buffer.h"))
.header(search_include(&include_paths, "libavutil/camellia.h"))
.header(search_include(&include_paths, "libavutil/cast5.h"))
.header(search_include(&include_paths, "libavutil/channel_layout.h"))
.header(search_include(&include_paths, "libavutil/cpu.h"))
.header(search_include(&include_paths, "libavutil/crc.h"))
.header(search_include(&include_paths, "libavutil/dict.h"))
.header(search_include(&include_paths, "libavutil/display.h"))
.header(search_include(&include_paths, "libavutil/downmix_info.h"))
.header(search_include(&include_paths, "libavutil/error.h"))
.header(search_include(&include_paths, "libavutil/eval.h"))
.header(search_include(&include_paths, "libavutil/fifo.h"))
.header(search_include(&include_paths, "libavutil/file.h"))
.header(search_include(&include_paths, "libavutil/frame.h"))
.header(search_include(&include_paths, "libavutil/hash.h"))
.header(search_include(&include_paths, "libavutil/hmac.h"))
.header(search_include(&include_paths, "libavutil/imgutils.h"))
.header(search_include(&include_paths, "libavutil/lfg.h"))
.header(search_include(&include_paths, "libavutil/log.h"))
.header(search_include(&include_paths, "libavutil/lzo.h"))
.header(search_include(&include_paths, "libavutil/macros.h"))
.header(search_include(&include_paths, "libavutil/mathematics.h"))
.header(search_include(&include_paths, "libavutil/md5.h"))
.header(search_include(&include_paths, "libavutil/mem.h"))
.header(search_include(&include_paths, "libavutil/motion_vector.h"))
.header(search_include(&include_paths, "libavutil/murmur3.h"))
.header(search_include(&include_paths, "libavutil/opt.h"))
.header(search_include(&include_paths, "libavutil/parseutils.h"))
.header(search_include(&include_paths, "libavutil/pixdesc.h"))
.header(search_include(&include_paths, "libavutil/pixfmt.h"))
.header(search_include(&include_paths, "libavutil/random_seed.h"))
.header(search_include(&include_paths, "libavutil/rational.h"))
.header(search_include(&include_paths, "libavutil/replaygain.h"))
.header(search_include(&include_paths, "libavutil/ripemd.h"))
.header(search_include(&include_paths, "libavutil/samplefmt.h"))
.header(search_include(&include_paths, "libavutil/sha.h"))
.header(search_include(&include_paths, "libavutil/sha512.h"))
.header(search_include(&include_paths, "libavutil/stereo3d.h"))
.header(search_include(&include_paths, "libavutil/avstring.h"))
.header(search_include(&include_paths, "libavutil/threadmessage.h"))
.header(search_include(&include_paths, "libavutil/time.h"))
.header(search_include(&include_paths, "libavutil/timecode.h"))
.header(search_include(&include_paths, "libavutil/twofish.h"))
.header(search_include(&include_paths, "libavutil/avutil.h"))
.header(search_include(&include_paths, "libavutil/xtea.h"));
if env::var("CARGO_FEATURE_POSTPROC").is_ok() {
builder = builder.header(search_include(&include_paths, "libpostproc/postprocess.h"));
}
if env::var("CARGO_FEATURE_SWRESAMPLE").is_ok() {
builder = builder.header(search_include(&include_paths, "libswresample/swresample.h"));
}
if env::var("CARGO_FEATURE_SWSCALE").is_ok() {
builder = builder.header(search_include(&include_paths, "libswscale/swscale.h"));
}
// Finish the builder and generate the bindings.
let bindings = builder
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
bindings
.write_to_file(output().join("bindings.rs"))
.expect("Couldn't write bindings!");
}
useTitle.ts
import { watch, unref } from 'vue';
import { useI18n } from '@/hooks/web/useI18n';
import { useTitle as usePageTitle } from '@vueuse/core';
import { useGlobSetting } from '@/hooks/setting';
import { useRouter } from 'vue-router';
import { REDIRECT_NAME } from '@/router/constant';
export function useTitle() {
const { title } = useGlobSetting();
const { t } = useI18n();
const { currentRoute } = useRouter();
const pageTitle = usePageTitle();
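// Sync the document title with the current route, skipping the transient
// redirect route so the title does not flicker during redirects.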
watch(
() => currentRoute.value.path,
() => {
const route = unref(currentRoute);
if (route.name === REDIRECT_NAME) {
return;
}
const tTitle = t(route?.meta?.title as string);
pageTitle.value = tTitle ? ` ${tTitle} - ${title} ` : `${title}`;
},
{ immediate: true }
);
}
service_plan.go
package ccv3
import (
"code.cloudfoundry.org/cli/api/cloudcontroller/ccv3/constant"
"code.cloudfoundry.org/cli/api/cloudcontroller/ccv3/internal"
"code.cloudfoundry.org/cli/resources"
"code.cloudfoundry.org/jsonry"
)
// ServicePlan represents a Cloud Controller V3 Service Plan.
type ServicePlan struct {
// GUID is a unique service plan identifier.
GUID string `json:"guid"`
// Name is the name of the service plan.
Name string `json:"name"`
// Description of the Service Plan.
Description string `json:"description"`
// Whether the Service Plan is available
Available bool `json:"available"`
// VisibilityType can be "public", "admin", "organization" or "space"
VisibilityType VisibilityType `json:"visibility_type"`
// Free shows whether or not the Service Plan is free of charge.
Free bool `json:"free"`
// Cost shows the cost of a paid service plan
Costs []Cost `json:"costs"`
// ServiceOfferingGUID is the GUID of the service offering
ServiceOfferingGUID string `jsonry:"relationships.service_offering.data.guid"`
// SpaceGUID is the space that a plan from a space-scoped broker relates to
SpaceGUID string `jsonry:"relationships.space.data.guid"`
Metadata *resources.Metadata `json:"metadata"`
}
func (sp *ServicePlan) UnmarshalJSON(data []byte) error {
return jsonry.Unmarshal(data, sp)
}
type Cost struct {
Amount float64 `json:"amount"`
Currency string `json:"currency"`
Unit string `json:"unit"`
}
// GetServicePlans lists service plan with optional filters.
func (client *Client) GetServicePlans(query ...Query) ([]ServicePlan, Warnings, error) {
plans, _, warnings, err := client.getServicePlans(query...)
return plans, warnings, err
}
func (client *Client) getServicePlans(query ...Query) ([]ServicePlan, IncludedResources, Warnings, error) {
var plans []ServicePlan
included, warnings, err := client.MakeListRequest(RequestParams{
RequestName: internal.GetServicePlansRequest,
Query: query,
ResponseBody: ServicePlan{},
AppendToList: func(item interface{}) error {
plans = append(plans, item.(ServicePlan))
return nil
},
})
return plans, included, warnings, err
}
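// Usage sketch (the NameFilter key is assumed from this package's Query
// conventions; substitute whichever filter key applies):
//
//   plans, warnings, err := client.GetServicePlans(Query{
//       Key:    NameFilter,
//       Values: []string{"small"},
//   })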
type ServicePlanWithSpaceAndOrganization struct {
// GUID is a unique service plan identifier.
GUID string
// Name is the name of the service plan.
Name string
// VisibilityType can be "public", "admin", "organization" or "space"
VisibilityType VisibilityType
// ServiceOfferingGUID is the GUID of the service offering
ServiceOfferingGUID string
SpaceGUID string
SpaceName string
OrganizationName string
}
type planSpaceDetails struct{ spaceName, orgName string }
func (client *Client) GetServicePlansWithSpaceAndOrganization(query ...Query) ([]ServicePlanWithSpaceAndOrganization, Warnings, error) {
query = append(query, Query{
Key: Include,
Values: []string{"space.organization"},
})
plans, included, warnings, err := client.getServicePlans(query...)
spaceDetailsFromGUID := computeSpaceDetailsTable(included)
var enrichedPlans []ServicePlanWithSpaceAndOrganization
for _, plan := range plans {
sd := spaceDetailsFromGUID[plan.SpaceGUID]
enrichedPlans = append(enrichedPlans, ServicePlanWithSpaceAndOrganization{
GUID: plan.GUID,
Name: plan.Name,
VisibilityType: plan.VisibilityType,
ServiceOfferingGUID: plan.ServiceOfferingGUID,
SpaceGUID: plan.SpaceGUID,
SpaceName: sd.spaceName,
OrganizationName: sd.orgName,
})
}
return enrichedPlans, warnings, err
}
type ServiceOfferingWithPlans struct {
// GUID is a unique service offering identifier.
GUID string
// Name is the name of the service offering.
Name string
// Description of the service offering
Description string
// ServiceBrokerName is the name of the service broker
ServiceBrokerName string
// List of service plans that this service offering provides
Plans []ServicePlan
}
func (client *Client) GetServicePlansWithOfferings(query ...Query) ([]ServiceOfferingWithPlans, Warnings, error) {
query = append(query, Query{
Key: Include,
Values: []string{"service_offering"},
})
query = append(query, Query{
Key: FieldsServiceOfferingServiceBroker,
Values: []string{"name,guid"},
})
plans, included, warnings, err := client.getServicePlans(query...)
if err != nil |
var offeringsWithPlans []ServiceOfferingWithPlans
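// indexOfOffering returns a stable index per offering GUID, appending a
// placeholder entry on first sight so plans and offering metadata can be
// merged into the same slot regardless of arrival order.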
offeringGUIDLookup := make(map[string]int)
indexOfOffering := func(serviceOfferingGUID string) int {
if i, ok := offeringGUIDLookup[serviceOfferingGUID]; ok {
return i
}
i := len(offeringsWithPlans)
offeringGUIDLookup[serviceOfferingGUID] = i
offeringsWithPlans = append(offeringsWithPlans, ServiceOfferingWithPlans{GUID: serviceOfferingGUID})
return i
}
brokerNameLookup := make(map[string]string)
for _, b := range included.ServiceBrokers {
brokerNameLookup[b.GUID] = b.Name
}
for _, p := range plans {
i := indexOfOffering(p.ServiceOfferingGUID)
offeringsWithPlans[i].Plans = append(offeringsWithPlans[i].Plans, p)
}
for _, o := range included.ServiceOfferings {
i := indexOfOffering(o.GUID)
offeringsWithPlans[i].Name = o.Name
offeringsWithPlans[i].Description = o.Description
offeringsWithPlans[i].ServiceBrokerName = brokerNameLookup[o.ServiceBrokerGUID]
}
return offeringsWithPlans, warnings, nil
}
func computeSpaceDetailsTable(included IncludedResources) map[string]planSpaceDetails {
orgNameFromGUID := make(map[string]string)
for _, org := range included.Organizations {
orgNameFromGUID[org.GUID] = org.Name
}
spaceDetailsFromGUID := make(map[string]planSpaceDetails)
for _, space := range included.Spaces {
details := planSpaceDetails{spaceName: space.Name}
if orgRelationship, ok := space.Relationships[constant.RelationshipTypeOrganization]; ok {
details.orgName = orgNameFromGUID[orgRelationship.GUID]
}
spaceDetailsFromGUID[space.GUID] = details
}
return spaceDetailsFromGUID
}
| {
return nil, warnings, err
} |
template.test.ts | import { expect } from 'chai';
describe('Addition', () => {
it('1 + 1 should equal 2', () => { | });
}); | expect(1 + 1).to.equal(2); |
dd_dialog.rs | #[cfg(target_family = "unix")]
use std::cmp::min;
use tui::{
backend::Backend,
layout::{Alignment, Constraint, Direction, Layout, Rect},
terminal::Frame,
text::{Span, Spans, Text},
widgets::{Block, Borders, Paragraph, Wrap},
};
use crate::{
app::{App, KillSignal},
canvas::Painter,
};
const DD_BASE: &str = " Confirm Kill Process ── Esc to close ";
const DD_ERROR_BASE: &str = " Error ── Esc to close ";
pub trait KillDialog {
fn get_dd_spans(&self, app_state: &App) -> Option<Text<'_>>;
fn draw_dd_confirm_buttons<B: Backend>(
&self, f: &mut Frame<'_, B>, button_draw_loc: &Rect, app_state: &mut App,
);
fn draw_dd_dialog<B: Backend>(
&self, f: &mut Frame<'_, B>, dd_text: Option<Text<'_>>, app_state: &mut App, draw_loc: Rect,
) -> bool;
}
impl KillDialog for Painter {
fn get_dd_spans(&self, app_state: &App) -> Option<Text<'_>> {
if let Some(dd_err) = &app_state.dd_err {
return Some(Text::from(vec![
Spans::default(),
Spans::from("Failed to kill process."),
Spans::from(dd_err.clone()),
Spans::from("Please press ENTER or ESC to close this dialog."),
]));
} else if let Some(to_kill_processes) = app_state.get_to_delete_processes() {
if let Some(first_pid) = to_kill_processes.1.first() {
return Some(Text::from(vec![
Spans::from(""),
if app_state.is_grouped(app_state.current_widget.widget_id) {
if to_kill_processes.1.len() != 1 {
Spans::from(format!(
"Kill {} processes with the name \"{}\"? Press ENTER to confirm.",
to_kill_processes.1.len(),
to_kill_processes.0
))
} else {
Spans::from(format!(
"Kill 1 process with the name \"{}\"? Press ENTER to confirm.",
to_kill_processes.0
))
}
} else {
Spans::from(format!(
"Kill process \"{}\" with PID {}? Press ENTER to confirm.",
to_kill_processes.0, first_pid
))
},
]));
}
}
None
}
#[cfg(target_os = "windows")]
fn draw_dd_confirm_buttons<B: Backend>(
&self, f: &mut Frame<'_, B>, button_draw_loc: &Rect, app_state: &mut App,
) {
| cfg(target_family = "unix")]
fn draw_dd_confirm_buttons<B: Backend>(
&self, f: &mut Frame<'_, B>, button_draw_loc: &Rect, app_state: &mut App,
) {
let signal_text;
#[cfg(target_os = "linux")]
{
signal_text = vec![
"0: Cancel",
"1: HUP",
"2: INT",
"3: QUIT",
"4: ILL",
"5: TRAP",
"6: ABRT",
"7: BUS",
"8: FPE",
"9: KILL",
"10: USR1",
"11: SEGV",
"12: USR2",
"13: PIPE",
"14: ALRM",
"15: TERM",
"16: STKFLT",
"17: CHLD",
"18: CONT",
"19: STOP",
"20: TSTP",
"21: TTIN",
"22: TTOU",
"23: URG",
"24: XCPU",
"25: XFSZ",
"26: VTALRM",
"27: PROF",
"28: WINCH",
"29: IO",
"30: PWR",
"31: SYS",
"34: RTMIN",
"35: RTMIN+1",
"36: RTMIN+2",
"37: RTMIN+3",
"38: RTMIN+4",
"39: RTMIN+5",
"40: RTMIN+6",
"41: RTMIN+7",
"42: RTMIN+8",
"43: RTMIN+9",
"44: RTMIN+10",
"45: RTMIN+11",
"46: RTMIN+12",
"47: RTMIN+13",
"48: RTMIN+14",
"49: RTMIN+15",
"50: RTMAX-14",
"51: RTMAX-13",
"52: RTMAX-12",
"53: RTMAX-11",
"54: RTMAX-10",
"55: RTMAX-9",
"56: RTMAX-8",
"57: RTMAX-7",
"58: RTMAX-6",
"59: RTMAX-5",
"60: RTMAX-4",
"61: RTMAX-3",
"62: RTMAX-2",
"63: RTMAX-1",
"64: RTMAX",
];
}
#[cfg(target_os = "macos")]
{
signal_text = vec![
"0: Cancel",
"1: HUP",
"2: INT",
"3: QUIT",
"4: ILL",
"5: TRAP",
"6: ABRT",
"7: EMT",
"8: FPE",
"9: KILL",
"10: BUS",
"11: SEGV",
"12: SYS",
"13: PIPE",
"14: ALRM",
"15: TERM",
"16: URG",
"17: STOP",
"18: TSTP",
"19: CONT",
"20: CHLD",
"21: TTIN",
"22: TTOU",
"23: IO",
"24: XCPU",
"25: XFSZ",
"26: VTALRM",
"27: PROF",
"28: WINCH",
"29: INFO",
"30: USR1",
"31: USR2",
];
}
let button_rect = Layout::default()
.direction(Direction::Horizontal)
.margin(1)
.constraints(
[
Constraint::Length((button_draw_loc.width - 14) / 2),
Constraint::Min(0),
Constraint::Length((button_draw_loc.width - 14) / 2),
]
.as_ref(),
)
.split(*button_draw_loc)[1];
let mut selected = match app_state.delete_dialog_state.selected_signal {
KillSignal::CANCEL => 0,
KillSignal::KILL(signal) => signal,
};
// 32+33 are skipped
if selected > 31 {
selected -= 2;
}
let layout = Layout::default()
.direction(Direction::Vertical)
.constraints(vec![Constraint::Min(1); button_rect.height as usize])
.split(button_rect);
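// Scroll bookkeeping: keep the highlighted signal within the visible window
// of rows, scrolling only when the selection walks off either end.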
let prev_offset: usize = app_state.delete_dialog_state.scroll_pos;
app_state.delete_dialog_state.scroll_pos = if selected == 0 {
0
} else if selected < prev_offset + 1 {
selected - 1
} else if selected > prev_offset + (layout.len() as usize) - 1 {
selected - (layout.len() as usize) + 1
} else {
prev_offset
};
let scroll_offset: usize = app_state.delete_dialog_state.scroll_pos;
let mut buttons = signal_text
[scroll_offset + 1..min((layout.len() as usize) + scroll_offset, signal_text.len())]
.iter()
.map(|text| Span::raw(*text))
.collect::<Vec<Span<'_>>>();
buttons.insert(0, Span::raw(signal_text[0]));
buttons[selected - scroll_offset] = Span::styled(
signal_text[selected],
self.colours.currently_selected_text_style,
);
app_state.delete_dialog_state.button_positions = layout
.iter()
.enumerate()
.map(|(i, pos)| {
(
pos.x,
pos.y,
pos.x + pos.width - 1,
pos.y + pos.height - 1,
if i == 0 { 0 } else { scroll_offset } + i,
)
})
.collect::<Vec<(u16, u16, u16, u16, usize)>>();
for (btn, pos) in buttons.into_iter().zip(layout.into_iter()) {
f.render_widget(Paragraph::new(btn).alignment(Alignment::Left), pos);
}
}
fn draw_dd_dialog<B: Backend>(
&self, f: &mut Frame<'_, B>, dd_text: Option<Text<'_>>, app_state: &mut App, draw_loc: Rect,
) -> bool {
if let Some(dd_text) = dd_text {
let dd_title = if app_state.dd_err.is_some() {
Spans::from(vec![
Span::styled(" Error ", self.colours.widget_title_style),
Span::styled(
format!(
"─{}─ Esc to close ",
"─".repeat(
usize::from(draw_loc.width)
.saturating_sub(DD_ERROR_BASE.chars().count() + 2)
)
),
self.colours.border_style,
),
])
} else {
Spans::from(vec![
Span::styled(" Confirm Kill Process ", self.colours.widget_title_style),
Span::styled(
format!(
"─{}─ Esc to close ",
"─".repeat(
usize::from(draw_loc.width)
.saturating_sub(DD_BASE.chars().count() + 2)
)
),
self.colours.border_style,
),
])
};
f.render_widget(
Paragraph::new(dd_text)
.block(
Block::default()
.title(dd_title)
.style(self.colours.border_style)
.borders(Borders::ALL)
.border_style(self.colours.border_style),
)
.style(self.colours.text_style)
.alignment(Alignment::Center)
.wrap(Wrap { trim: true }),
draw_loc,
);
let btn_height;
#[cfg(target_family = "unix")]
{
btn_height = 20;
}
#[cfg(target_os = "windows")]
{
btn_height = 3;
}
// Now draw buttons if needed...
let split_draw_loc = Layout::default()
.direction(Direction::Vertical)
.constraints(
if app_state.dd_err.is_some() {
vec![Constraint::Percentage(100)]
} else {
vec![Constraint::Min(3), Constraint::Length(btn_height)]
}
.as_ref(),
)
.split(draw_loc);
// This being true implies that dd_err is none.
if let Some(button_draw_loc) = split_draw_loc.get(1) {
self.draw_dd_confirm_buttons(f, button_draw_loc, app_state);
}
if app_state.dd_err.is_some() {
return app_state.delete_dialog_state.is_showing_dd;
} else {
return true;
}
}
// Currently we just return "false" if things go wrong finding
// the process or a first PID (if an error arises it should be caught).
// I don't really like this, and I find it ugly, but it works for now.
false
}
}
| let (yes_button, no_button) = match app_state.delete_dialog_state.selected_signal {
KillSignal::KILL(_) => (
Span::styled("Yes", self.colours.currently_selected_text_style),
Span::raw("No"),
),
KillSignal::CANCEL => (
Span::raw("Yes"),
Span::styled("No", self.colours.currently_selected_text_style),
),
};
let button_layout = Layout::default()
.direction(Direction::Horizontal)
.constraints(
[
Constraint::Percentage(35),
Constraint::Percentage(30),
Constraint::Percentage(35),
]
.as_ref(),
)
.split(*button_draw_loc);
f.render_widget(
Paragraph::new(yes_button)
.block(Block::default())
.alignment(Alignment::Right),
button_layout[0],
);
f.render_widget(
Paragraph::new(no_button)
.block(Block::default())
.alignment(Alignment::Left),
button_layout[2],
);
if app_state.should_get_widget_bounds() {
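// Record each button's clickable region as (left, top, right, bottom, id);
// id 0 is the "No"/cancel button and id 1 the "Yes" button.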
app_state.delete_dialog_state.button_positions = vec![
(
button_layout[2].x,
button_layout[2].y,
button_layout[2].x + button_layout[2].width,
button_layout[2].y + button_layout[2].height,
0,
),
(
button_layout[0].x,
button_layout[0].y,
button_layout[0].x + button_layout[0].width,
button_layout[0].y + button_layout[0].height,
1,
),
];
}
}
#[ |
persistent_volumes-gce.go | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package storage
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/types"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/kubernetes/test/e2e/framework"
e2elog "k8s.io/kubernetes/test/e2e/framework/log"
"k8s.io/kubernetes/test/e2e/framework/providers/gce"
"k8s.io/kubernetes/test/e2e/storage/utils"
)
// verifyGCEDiskAttached performs a sanity check to verify that the PD is attached to the node.
func verifyGCEDiskAttached(diskName string, nodeName types.NodeName) bool |
// initializeGCETestSpec creates a PV, PVC, and client pod that will run until killed by the test or cleanup.
func initializeGCETestSpec(c clientset.Interface, ns string, pvConfig framework.PersistentVolumeConfig, pvcConfig framework.PersistentVolumeClaimConfig, isPrebound bool) (*v1.Pod, *v1.PersistentVolume, *v1.PersistentVolumeClaim) {
By("Creating the PV and PVC")
pv, pvc, err := framework.CreatePVPVC(c, pvConfig, pvcConfig, ns, isPrebound)
framework.ExpectNoError(err)
framework.ExpectNoError(framework.WaitOnPVandPVC(c, ns, pv, pvc))
By("Creating the Client Pod")
clientPod, err := framework.CreateClientPod(c, ns, pvc)
framework.ExpectNoError(err)
return clientPod, pv, pvc
}
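// Note: waitForPDDetach, used by the tests below, is provided elsewhere in
// this suite; the tests here only assert that deleting the PVC, PV, or
// namespace first does not block the pod's unmount-and-detach path.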
// Testing configurations of a single PV/PVC pair attached to a GCE PD
var _ = utils.SIGDescribe("PersistentVolumes GCEPD", func() {
var (
c clientset.Interface
diskName string
ns string
err error
pv *v1.PersistentVolume
pvc *v1.PersistentVolumeClaim
clientPod *v1.Pod
pvConfig framework.PersistentVolumeConfig
pvcConfig framework.PersistentVolumeClaimConfig
volLabel labels.Set
selector *metav1.LabelSelector
node types.NodeName
)
f := framework.NewDefaultFramework("pv")
BeforeEach(func() {
c = f.ClientSet
ns = f.Namespace.Name
// Enforce binding only within test space via selector labels
volLabel = labels.Set{framework.VolumeSelectorKey: ns}
selector = metav1.SetAsLabelSelector(volLabel)
framework.SkipUnlessProviderIs("gce", "gke")
By("Initializing Test Spec")
diskName, err = framework.CreatePDWithRetry()
framework.ExpectNoError(err)
pvConfig = framework.PersistentVolumeConfig{
NamePrefix: "gce-",
Labels: volLabel,
PVSource: v1.PersistentVolumeSource{
GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{
PDName: diskName,
FSType: "ext3",
ReadOnly: false,
},
},
Prebind: nil,
}
emptyStorageClass := ""
pvcConfig = framework.PersistentVolumeClaimConfig{
Selector: selector,
StorageClassName: &emptyStorageClass,
}
clientPod, pv, pvc = initializeGCETestSpec(c, ns, pvConfig, pvcConfig, false)
node = types.NodeName(clientPod.Spec.NodeName)
})
AfterEach(func() {
e2elog.Logf("AfterEach: Cleaning up test resources")
if c != nil {
framework.ExpectNoError(framework.DeletePodWithWait(f, c, clientPod))
if errs := framework.PVPVCCleanup(c, ns, pv, pvc); len(errs) > 0 {
framework.Failf("AfterEach: Failed to delete PVC and/or PV. Errors: %v", utilerrors.NewAggregate(errs))
}
clientPod, pv, pvc, node = nil, nil, nil, ""
if diskName != "" {
framework.ExpectNoError(framework.DeletePDWithRetry(diskName))
}
}
})
// Attach a persistent disk to a pod using a PVC.
// Delete the PVC and then the pod. Expect the pod to succeed in unmounting and detaching PD on delete.
It("should test that deleting a PVC before the pod does not cause pod deletion to fail on PD detach", func() {
By("Deleting the Claim")
framework.ExpectNoError(framework.DeletePersistentVolumeClaim(c, pvc.Name, ns), "Unable to delete PVC ", pvc.Name)
Expect(verifyGCEDiskAttached(diskName, node)).To(BeTrue())
By("Deleting the Pod")
framework.ExpectNoError(framework.DeletePodWithWait(f, c, clientPod), "Failed to delete pod ", clientPod.Name)
By("Verifying Persistent Disk detach")
framework.ExpectNoError(waitForPDDetach(diskName, node), "PD ", diskName, " did not detach")
})
// Attach a persistent disk to a pod using a PVC.
// Delete the PV and then the pod. Expect the pod to succeed in unmounting and detaching PD on delete.
It("should test that deleting the PV before the pod does not cause pod deletion to fail on PD detach", func() {
By("Deleting the Persistent Volume")
framework.ExpectNoError(framework.DeletePersistentVolume(c, pv.Name), "Failed to delete PV ", pv.Name)
Expect(verifyGCEDiskAttached(diskName, node)).To(BeTrue())
By("Deleting the client pod")
framework.ExpectNoError(framework.DeletePodWithWait(f, c, clientPod), "Failed to delete pod ", clientPod.Name)
By("Verifying Persistent Disk detaches")
framework.ExpectNoError(waitForPDDetach(diskName, node), "PD ", diskName, " did not detach")
})
// Test that a Pod and PVC attached to a GCEPD successfully unmounts and detaches when the encompassing Namespace is deleted.
It("should test that deleting the Namespace of a PVC and Pod causes the successful detach of Persistent Disk", func() {
By("Deleting the Namespace")
err := c.CoreV1().Namespaces().Delete(ns, nil)
framework.ExpectNoError(err)
err = framework.WaitForNamespacesDeleted(c, []string{ns}, framework.DefaultNamespaceDeletionTimeout)
framework.ExpectNoError(err)
By("Verifying Persistent Disk detaches")
framework.ExpectNoError(waitForPDDetach(diskName, node), "PD ", diskName, " did not detach")
})
})
| {
gceCloud, err := gce.GetGCECloud()
framework.ExpectNoError(err)
isAttached, err := gceCloud.DiskIsAttached(diskName, nodeName)
framework.ExpectNoError(err)
return isAttached
} |
models.py | # -*- coding: utf-8 -*-
import datetime
from django.db.models import Count
import os
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db.models.signals import post_save
from django.dispatch import receiver
from uuslug import uuslug as slugify
from sorl.thumbnail import ImageField |
def category_upload_path(instance, filename):
from utils import timestampbased_filename
category_slug = slugify(instance.title)
path = os.path.join(
'article_category',
category_slug,
timestampbased_filename(filename)
)
return path
def article_upload_path(instance, filename):
from utils import timestampbased_filename
article_slug = slugify(instance.title)
path = os.path.join(
'articles',
article_slug,
timestampbased_filename(filename)
)
return path
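# Example of the resulting layout (slug and timestamped name hypothetical):
#   article_upload_path(article, "photo.jpg") -> "articles/my-article/1371034800_photo.jpg"
# where the exact file name depends on utils.timestampbased_filename.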
class ArticleCategory(models.Model):
title = models.CharField("Заголовок", max_length=255)
slug = models.SlugField("URL", unique=True)
image = ImageField("Изображение", upload_to=category_upload_path, blank=True, null=True)
image_alt = models.CharField("ALT изображения", max_length=255, blank=True, null=True)
image_title = models.CharField("TITLE изображения", max_length=255, blank=True, null=True)
add_watermark = models.BooleanField("Добавлять водяной знак?", default=False)
description = models.TextField("Описание", blank=True, null=True)
author = models.ForeignKey(User, verbose_name="Автор")
published = models.BooleanField("Опубликовано", default=True)
created = models.DateTimeField("Время создания", auto_now_add=True)
updated = models.DateTimeField("Время последнего обновления", auto_now=True)
visits_num = models.PositiveIntegerField("Кол. посещений", default=0, editable=False)
def set_tags(self, tags):
Tag.objects.update_tags(self, tags)
def get_tags(self, tags):
return Tag.objects.get_for_object(self)
def inc_visits(self):
self.visits_num += 1
self.save()
@property
def tags(self):
content_type = ContentType.objects.get_for_model(self)
try:
tagged_item = TaggedItem.objects.get(content_type=content_type, object_id=self.id)
except TaggedItem.DoesNotExist:
return []
return tagged_item.tags.all()
def get_absolute_url(self):
return reverse("articlecategory_details", args=(self.slug, ))
def __unicode__(self):
return self.title
class Meta:
verbose_name = "Категория статей"
verbose_name_plural = "Категории статей"
class Article(models.Model):
title = models.CharField("Заголовок", max_length=255)
slug = models.SlugField("URL", unique=True)
old_id = models.IntegerField("Старый ID", unique=True, blank=True, null=True)
image = models.ImageField("Изображение", upload_to=article_upload_path,
blank=True, null=True
)
image_alt = models.CharField("ALT изображения", max_length=255, blank=True, null=True)
image_title = models.CharField("TITLE изображения", max_length=255, blank=True, null=True)
add_watermark = models.BooleanField("Добавлять водяной знак?", default=False)
description = models.TextField("Описание", blank=True, null=True)
body = models.TextField("Текст статьи")
author = models.ForeignKey(User, verbose_name="Автор")
category = models.ForeignKey(ArticleCategory, verbose_name="Категория", related_name="articles")
verified = models.BooleanField("Проверена", default=False)
published = models.BooleanField("Опубликовано", default=True)
pub_date = models.DateTimeField("Опубликовано", blank=True)
created = models.DateTimeField("Создано", auto_now_add=True)
updated = models.DateTimeField("Обновлено", auto_now=True)
visits_num = models.PositiveIntegerField("Кол. посещений", default=0, editable=False)
comments_num = models.PositiveIntegerField(u"Кол. коментариев", default=0, editable=False)
def inc_visits(self):
self.visits_num += 1
self.save()
@property
def num_comments(self):
comments = Comment.objects.filter(
content_type_id=ContentType.objects.get_for_model(self).id,
object_id=self.id
).annotate(answer_count=Count('answers')).values_list('answer_count', flat=True)
# Each top-level comment counts once, plus all of its answers.
return len(comments) + sum(comments)
@property
def tags(self):
content_type = ContentType.objects.get_for_model(self)
try:
tagged_item = TaggedItem.objects.get(content_type=content_type, object_id=self.id)
except TaggedItem.DoesNotExist:
return []
return tagged_item.tags.all()
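# pub_date defaults to "now" on first save so the scheduled publishing task
# below always receives a concrete ETA.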
def save(self, *args, **kwargs):
if not self.pub_date:
self.pub_date = datetime.datetime.now()
super(Article, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse("article_details", args=(self.slug, ))
def get_contenttype_id(self):
return ContentType.objects.get_for_model(Article).id
def __unicode__(self):
return self.title
class Meta:
verbose_name = "Статья"
verbose_name_plural = "Статьи"
@receiver(post_save, sender=Article)
def publish_article_task(sender, instance, created, **kwargs):
from articles.tasks import publish_article
if not instance.published:
publish_article.apply_async(args=(instance, ), eta=instance.pub_date)
@receiver(post_save, sender=Article)
def article_watermark(sender, instance, created, **kwargs):
if not instance.add_watermark:
return
from utils import add_watermark
marked_img = add_watermark(instance.image)
if not marked_img:
return
instance.image = marked_img
instance.save()
@receiver(post_save, sender=ArticleCategory)
def articlecategory_watermark(sender, instance, created, **kwargs):
if not instance.add_watermark:
return
from utils import add_watermark
marked_img = add_watermark(instance.image)
if not marked_img:
return
instance.image = marked_img
instance.save() |
from tags.models import Tag, TaggedItem
from comments.models import Comment, CommentAnswer |
main.js | /**
* Copyright (c) 2006
* Martin Czuchra, Nicolas Peters, Daniel Polak, Willi Tscheschner
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
**/
var idCounter = 0;
var ID_PREFIX = "resource";
/**
* Main initialization method. To be called when loading
* of the document, including all scripts, is completed.
*/
function | () {
/* When the blank image url is not set programmatically to a local
* representation, a spacer gif is loaded from the Ext site on the
* internet. This causes problems when the internet or the Ext site
* is not available. */
Ext.BLANK_IMAGE_URL = ORYX.BASE_FILE_PATH + 'lib/ext-2.0.2/resources/images/default/s.gif';
ORYX.Log.debug("Querying editor instances");
// Hack for WebKit to set the SVGElement-Classes
ORYX.Editor.setMissingClasses();
// use this hook to get initialized through the plugin in charge of loading the model
//window.onOryxResourcesLoaded();
}
/**
@namespace Global Oryx name space
@name ORYX
*/
if(!ORYX) {var ORYX = {};}
/**
* The Editor class.
* @class ORYX.Editor
* @extends Clazz
* @param {Object} config An editor object, passed to {@link ORYX.Editor#loadSerialized}
* @param {String} config.id Any ID that can be used inside the editor. If fullscreen=false, any HTML node with this id must be present to render the editor to this node.
* @param {boolean} [config.fullscreen=true] Render editor in fullscreen mode or not.
* @param {String} config.stencilset.url Stencil set URL.
* @param {String} [config.stencil.id] Stencil type used for creating the canvas.
* @param {Object} config.properties Any properties applied to the canvas.
*/
ORYX.Editor = {
/** @lends ORYX.Editor.prototype */
// Defines the global dom event listener
DOMEventListeners: new Hash(),
// Defines the selection
selection: [],
// Defines the current zoom level
zoomLevel:1.0,
construct: function(config) {
// initialization.
this._eventsQueue = [];
this.loadedPlugins = [];
this.pluginsData = [];
this.simulationChartTimeUnit = "";
this.simulationChartData = "";
this.simulationEventData = "";
this.simulationEventAggregationData = "";
this.simulationInstancesData = "";
this.simulationHTCostData = "";
this.simulationHTResourceData = "";
this.simulationChartTitle = "";
this.simulationChartId = "";
this.simulationChartNodeName = "";
this.simulationPathData = "";
this.simulationPathId = "";
this.simulationPathSVG = "";
this.localStorageSVG = "";
this.imagePreviewSVG = "";
//meta data about the model for the signavio warehouse
//directory, new, name, description, revision, model (the model data)
this.modelMetaData = config;
var model = config;
if(config.model) {
model = config.model;
}
this.updateViewLockState();
if(config.error) {
Ext.Msg.show({
title : 'Unable to open Process',
msg : "Process will be opened with XML Editor",
buttons: Ext.MessageBox.OK,
fn : function(text,btn) { parent.designeropeninxmleditortab(ORYX.UUID); }
});
}
this.id = model.resourceId;
if(!this.id) {
this.id = model.id;
if(!this.id) {
this.id = ORYX.Editor.provideId();
}
}
// Defines if the editor should be fullscreen or not
this.fullscreen = (model.fullscreen !== undefined) ? model.fullscreen : true;
// Initialize the eventlistener
this._initEventListener();
// Load particular stencilset
if(ORYX.CONFIG.BACKEND_SWITCH) {
var ssUrl = (model.stencilset.namespace||model.stencilset.url).replace("#", "%23");
ORYX.Core.StencilSet.loadStencilSet(ORYX.CONFIG.STENCILSET_HANDLER + ssUrl, this.id);
} else {
var ssUrl = model.stencilset.url;
ORYX.Core.StencilSet.loadStencilSet(ssUrl, this.id);
}
//load the extensions
if(!!ORYX.CONFIG.SSEXTS){
ORYX.CONFIG.SSEXTS.each(function(ssext){
this.loadSSExtension(ssext);
}.bind(this));
}
// CREATES the canvas
this._createCanvas(model.stencil ? model.stencil.id : null, model.properties);
// GENERATES the whole EXT.VIEWPORT
this._generateGUI(config);
// Initializing of a callback to check loading ends
var loadPluginFinished = false;
var loadContentFinished = false;
var initFinished = function(){
if( !loadPluginFinished || !loadContentFinished ){ return }
this._finishedLoading();
}.bind(this)
// disable key events when Ext modal window is active
ORYX.Editor.makeExtModalWindowKeysave(this._getPluginFacade());
// LOAD the plugins
window.setTimeout(function(){
this.loadPlugins();
loadPluginFinished = true;
initFinished();
}.bind(this), 100);
// LOAD the content of the current editor instance
window.setTimeout(function(){
this.loadSerialized(model);
this.getCanvas().update();
loadContentFinished = true;
initFinished();
}.bind(this), 200);
},
updateViewLockState: function() {
if(ORYX.INSTANCE_VIEW_MODE != true) {
if ( (typeof parent.isLocked === "function") && (typeof parent.isLockedByCurrentUser === "function") ) {
var isEditorLocked = parent.isLocked();
var isEditorLockedByCurrentUser = parent.isLockedByCurrentUser();
var isReadOnly = ( ORYX.READONLY == true ) || ( ORYX.VIEWLOCKED == true );
if(!isEditorLocked) {
ORYX.VIEWLOCKED = false;
} else {
if(isEditorLocked && !isEditorLockedByCurrentUser) {
ORYX.VIEWLOCKED = true;
} else if(isEditorLocked && isEditorLockedByCurrentUser) {
ORYX.VIEWLOCKED = false;
}
}
// We're in read only mode, but got the lock, so let's reload to enter edit mode.
if (isReadOnly && !ORYX.VIEWLOCKED) {
if (typeof parent.reload === "function") {
ORYX.PROCESS_SAVED = true;
parent.reload();
}
}
}
}
},
_finishedLoading: function() {
if(Ext.getCmp('oryx-loading-panel')){
Ext.getCmp('oryx-loading-panel').hide()
}
// Do Layout for viewport
this.layout.doLayout();
// Generate a drop target
new Ext.dd.DropTarget(this.getCanvas().rootNode.parentNode);
// Fixed the problem that the viewport can not
// start with collapsed panels correctly
if (ORYX.CONFIG.PANEL_RIGHT_COLLAPSED === true){
this.layout_regions.east.collapse();
}
if (ORYX.CONFIG.PANEL_LEFT_COLLAPSED === true){
this.layout_regions.west.collapse();
}
// Raise Loaded Event
this.handleEvents( {type:ORYX.CONFIG.EVENT_LOADED} );
},
_initEventListener: function(){
// Register on Events
document.documentElement.addEventListener(ORYX.CONFIG.EVENT_KEYDOWN, this.catchKeyDownEvents.bind(this), true);
document.documentElement.addEventListener(ORYX.CONFIG.EVENT_KEYUP, this.catchKeyUpEvents.bind(this), true);
// Enable Key up and down Event
this._keydownEnabled = true;
this._keyupEnabled = true;
this.DOMEventListeners[ORYX.CONFIG.EVENT_MOUSEDOWN] = [];
this.DOMEventListeners[ORYX.CONFIG.EVENT_MOUSEUP] = [];
this.DOMEventListeners[ORYX.CONFIG.EVENT_MOUSEOVER] = [];
this.DOMEventListeners[ORYX.CONFIG.EVENT_MOUSEOUT] = [];
this.DOMEventListeners[ORYX.CONFIG.EVENT_SELECTION_CHANGED] = [];
this.DOMEventListeners[ORYX.CONFIG.EVENT_MOUSEMOVE] = [];
},
_chartSelected : function(node,event) {
this._getPluginFacade().raiseEvent({
type : ORYX.CONFIG.EVENT_SIMULATION_DISPLAY_GRAPH,
value : node
});
},
/**
* Generates the whole viewport of the
* editor and initializes the Ext framework
*
*/
_generateGUI: function(config) {
//TODO make the height be read from eRDF data from the canvas.
// default, a non-fullscreen editor shall define its height by layout.setHeight(int)
// Defines the layout height if it's NOT fullscreen
var layoutHeight = 660;
var canvasParent = this.getCanvas().rootNode.parentNode;
this.centerContentPanel = new Ext.Panel({
autoScroll: true,
cmargins: {left:0, right:0},
border: false,
items : {
layout : "fit",
autoHeight: true,
el : canvasParent
}
});
this.resultsChartPanel = new Ext.Panel({
border: false,
id: 'simchart',
html: "<svg></svg>"
});
this.simResultsContentPanel = new Ext.Panel({
id: "simresultscontent",
autoScroll: true,
autoheight: true,
border: false,
items : [{
xtype : "component",
id : 'simchartframe',
anchor: '100%',
autoScroll: true,
autoEl : {
tag : "iframe",
src : ORYX.BASE_FILE_PATH + 'simulation/default.jsp',
width: "100%",
height: "500",
frameborder: "0",
scrolling: "auto"
}
}]
});
this.simInfoPanel = new Ext.Panel({
bodyStyle:'background:#ffff;font-size:9px;font-family:Verdana, Geneva, Arial, Helvetica, sans-serif;padding-left:5px;',
id: "siminfopanel",
title: ORYX.I18N.View.sim.resultsInfo,
autoScroll: true,
autoheight: false,
height: 300,
border: false,
html: ""
});
this.simResultsTree = new Ext.tree.TreePanel({
id: "simresultscharts",
title: ORYX.I18N.View.sim.resultsGraphs,
//layout: 'fit',
animate:true,
loader: new Ext.tree.TreeLoader(),
rootVisible: false,
scroll: true,
autoScroll: true,
autoheight: true,
viewConfig : {
style : { overflow: 'scroll', overflowY: 'scroll', overflowX: 'scroll' }
},
lines: true,
listeners: {
click: {
fn:this._chartSelected.bind(this)
}
}
});
var simTreeRoot = new Ext.tree.TreeNode({
draggable: false,
id: 'simcharts'
});
this.simResultsTree.setRootNode(simTreeRoot);
this.simResultsContentPanelLayout = new Ext.Panel({
width: "100%",
autoscroll: true,
//sheight: 1000,
//autoheight: true,
layout: 'border',
items: [{
xtype:'panel',
region:'east',
margins: '5 0 0 5',
layout : 'fit',
anchor:'100%',
width: 300,
border: false,
collapsible: true,
autoscroll: true,
split: false,
//minSize: 100,
//maxSize: 500,
//autoheight: true,
cmargins: '5 5 0 5',
bodyCfg : { style: {'overflow':'auto'} },
autoScroll : true,
items: [this.simResultsTree, this.simInfoPanel]
},{
xtype:'panel',
region: 'center',
layout : 'fit',
anchor:'100%',
border: false,
autoscroll: true,
autoheight: true,
margins: '5 5 0 0',
items: [this.simResultsContentPanel]
}]
});
var tabs_config = {
id : 'maintabs',
region : 'center',
cls : 'x-panel-editor-center',
autoScroll: false,
cmargins: {left:0, right:0},
activeTab: 0,
border: false,
tabPosition: 'top',
anchor: "100%",
deferredRender : false,
listeners: {
tabchange: function(tabpanel, tab) {
this.centerContentTabPannel.doLayout();
this.simResultsContentPanelLayout.doLayout();
tabpanel.doLayout();
}.bind(this)
},
items: [{
layout: "fit",
title: ORYX.I18N.View.tabs.modelling,
id: 'processmodellingtab',
items : [this.centerContentPanel]
},
{
layout: "fit",
title: ORYX.I18N.View.tabs.simResults,
id: 'simulationtab',
autoScroll : false,
items : [this.simResultsContentPanelLayout]
}
]
};
this.centerContentTabPannel = new Ext.TabPanel(tabs_config);
if(ORYX.READONLY == true) {
Ext.getCmp('maintabs').remove("simulationtab");
}
if(ORYX.VIEWLOCKED == true) {
Ext.getCmp('maintabs').remove("simulationtab");
}
// DEFINITION OF THE VIEWPORT AREAS
var eastWidth = ORYX.CONFIG.PANEL_LEFT_WIDTH || 400;
if(ORYX.READONLY == true) {
eastWidth = 10;
}
if(ORYX.VIEWLOCKED == true) {
eastWidth = 10;
}
this.layout_regions = {
// DEFINES TOP-AREA
north : new Ext.Panel({ //TOOO make a composite of the oryx header and addable elements (for toolbar), second one should contain margins
region : 'north',
cls : 'x-panel-editor-north',
autoEl : 'div',
border : false
}),
// DEFINES RIGHT-AREA
east : new Ext.Panel({
region : 'east',
layout : 'anchor',
autoEl : 'div',
border: false,
cls : 'x-panel-editor-east',
width : eastWidth,
autoScroll:true,
split : false,
animate: true,
collapsible : true,
titleCollapse: true,
title: "Properties",
plugins: new Ext.ux.PanelCollapsedTitlePlugin()
}),
// DEFINES BOTTOM-AREA
south : new Ext.Panel({
region : 'south',
cls : 'x-panel-editor-south',
autoEl : 'div',
border : false
}),
// DEFINES LEFT-AREA
west : new Ext.Panel({
region : 'west',
layout : 'anchor',
autoEl : 'div',
border: false,
cls : 'x-panel-editor-west',
width : ORYX.CONFIG.PANEL_LEFT_WIDTH || 200,
autoScroll:true,
split : false,
animate: true,
collapsible : true,
titleCollapse: true,
title: ORYX.I18N.main.shapeRepo,
plugins: new Ext.ux.PanelCollapsedTitlePlugin()
}),
// DEFINES CENTER-AREA (FOR THE EDITOR)
center : this.centerContentTabPannel
};
// Hide every region except the center
for (region in this.layout_regions) {
if ( (region != "center" && region != "north") && (ORYX.READONLY == true || ORYX.VIEWLOCKED == true)) {
this.layout_regions[ region ].setVisible(false);
}
}
// Config for the Ext.Viewport
var layout_config = {
layout: 'border',
items: [
this.layout_regions.north,
this.layout_regions.east,
this.layout_regions.south,
this.layout_regions.west,
this.layout_regions.center
]
};
this.contentviewport = new Ext.Viewport( layout_config );
//this.fullscreen = config.fullscreen;
if (this.fullscreen) {
this.layout = new Ext.Viewport( layout_config );
} else {
layout_config.renderTo = this.id;
layout_config.height = layoutHeight;
this.layout = new Ext.Panel( layout_config );
}
// Set the editor to the center, and refresh the size
canvasParent.parentNode.setAttributeNS(null, 'align', 'center');
canvasParent.setAttributeNS(null, 'align', 'left');
this.getCanvas().setSize({
width : ORYX.CONFIG.CANVAS_WIDTH,
height : ORYX.CONFIG.CANVAS_HEIGHT
});
},
/**
* adds a component to the specified region
*
* @param {String} region
* @param {Ext.Component} component
* @param {String} title, optional
* @return {Ext.Component} dom reference to the current region or null if specified region is unknown
*/
addToRegion: function(region, component, title) {
if (region.toLowerCase && this.layout_regions[region.toLowerCase()]) {
var current_region = this.layout_regions[region.toLowerCase()];
current_region.add(component);
/*if( (region.toLowerCase() == 'east' || region.toLowerCase() == 'west') && current_region.items.length == 2){ //!current_region.getLayout() instanceof Ext.layout.Accordion ){
var layout = new Ext.layout.Accordion( current_region.layoutConfig );
current_region.setLayout( layout );
var items = current_region.items.clone();
current_region.items.each(function(item){ current_region.remove( item )})
items.each(function(item){ current_region.add( item )})
} */
ORYX.Log.debug("original dimensions of region %0: %1 x %2", current_region.region, current_region.width, current_region.height)
// update dimensions of region if required.
if (!current_region.width && component.initialConfig && component.initialConfig.width) {
ORYX.Log.debug("resizing width of region %0: %1", current_region.region, component.initialConfig.width)
current_region.setWidth(component.initialConfig.width)
}
if (component.initialConfig && component.initialConfig.height) {
ORYX.Log.debug("resizing height of region %0: %1", current_region.region, component.initialConfig.height)
var current_height = current_region.height || 0;
current_region.height = component.initialConfig.height + current_height;
current_region.setHeight(component.initialConfig.height + current_height)
}
// set title if provided as parameter.
if (typeof title == "string") {
current_region.setTitle(title);
}
// trigger doLayout() and show the pane
current_region.ownerCt.doLayout();
if((ORYX.VIEWLOCKED == true || ORYX.READONLY == true) && current_region.region != "center" ) {
} else {
current_region.show();
}
if(Ext.isMac)
ORYX.Editor.resizeFix();
return current_region;
}
return null;
},
getAvailablePlugins: function(){
var curAvailablePlugins=ORYX.availablePlugins.clone();
curAvailablePlugins.each(function(plugin){
if(this.loadedPlugins.find(function(loadedPlugin){
return loadedPlugin.type==this.name;
}.bind(plugin))){
plugin.engaged=true;
}else{
plugin.engaged=false;
}
}.bind(this));
return curAvailablePlugins;
},
loadScript: function (url, callback){
var script = document.createElement("script")
script.type = "text/javascript";
if (script.readyState){ //IE
script.onreadystatechange = function(){
if (script.readyState == "loaded" || script.readyState == "complete"){
script.onreadystatechange = null;
callback();
}
};
} else { //Others
script.onload = function(){
callback();
};
}
script.src = url;
document.getElementsByTagName("head")[0].appendChild(script);
},
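// Usage sketch (URL hypothetical):
//   this.loadScript("plugins/scripts/myplugin.js", function() {
//       // plugin classes defined by the script are now available
//   });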
/**
* Activates a plugin by name.
*
* @param {String} name
* @param {Function} callback
* callback(success, [errorCode])
* errorCodes: NOTUSEINSTENCILSET, REQUIRESTENCILSET, NOTFOUND, YETACTIVATED
*/
activatePluginByName: function(name, callback, loadTry){
var match=this.getAvailablePlugins().find(function(value){return value.name==name});
if(match && (!match.engaged || (match.engaged==='false'))){
var loadedStencilSetsNamespaces = this.getStencilSets().keys();
var facade = this._getPluginFacade();
var newPlugin;
var me=this;
ORYX.Log.debug("Initializing plugin '%0'", match.name);
if (!match.requires || !match.requires.namespaces || match.requires.namespaces.any(function(req){ return loadedStencilSetsNamespaces.indexOf(req) >= 0 }) ){
if(!match.notUsesIn || !match.notUsesIn.namespaces || !match.notUsesIn.namespaces.any(function(req){ return loadedStencilSetsNamespaces.indexOf(req) >= 0 })){
try {
var className = eval(match.name);
var newPlugin = new className(facade, match);
newPlugin.type = match.name;
// If it is a GUI plugin, it receives all plugin offer meta data
if (newPlugin.registryChanged)
newPlugin.registryChanged(me.pluginsData);
// If it has an onSelectionChanged method, it is registered with the editor event handler
if (newPlugin.onSelectionChanged)
me.registerOnEvent(ORYX.CONFIG.EVENT_SELECTION_CHANGED, newPlugin.onSelectionChanged.bind(newPlugin));
this.loadedPlugins.push(newPlugin);
this.loadedPlugins.each(function(loaded){
if(loaded.registryChanged)
loaded.registryChanged(this.pluginsData);
}.bind(me));
callback(true);
} catch(e) {
ORYX.Log.warn("Plugin %0 is not available", match.name);
if(!!loadTry){
callback(false,"INITFAILED");
return;
}
this.loadScript("plugins/scripts/"+match.source, this.activatePluginByName.bind(this,match.name,callback,true));
}
}else{
callback(false,"NOTUSEINSTENCILSET");
ORYX.Log.info("Plugin need a stencilset which is not loaded'", match.name);
}
} else {
callback(false,"REQUIRESTENCILSET");
ORYX.Log.info("Plugin need a stencilset which is not loaded'", match.name);
}
}else{
callback(false, match?"NOTFOUND":"YETACTIVATED");
//TODO error handling
}
},
/**
* Loads the plugins
*/
loadPlugins: function() {
// if there should be plugins but still are none, try again.
// TODO this should wait for every plugin respectively.
/*if (!ORYX.Plugins && ORYX.availablePlugins.length > 0) {
window.setTimeout(this.loadPlugins.bind(this), 100);
return;
}*/
var me = this;
var newPlugins = [];
var loadedStencilSetsNamespaces = this.getStencilSets().keys();
// Available plugins will be initialized
var facade = this._getPluginFacade();
// If there is an Array where all plugins are described, then only take those
// (that comes from the usage of oryx with a mashup api)
if( ORYX.MashupAPI && ORYX.MashupAPI.loadablePlugins && ORYX.MashupAPI.loadablePlugins instanceof Array ){
// Get the plugins from the available plugins (those who are in the plugins.xml)
ORYX.availablePlugins = $A(ORYX.availablePlugins).findAll(function(value){
return ORYX.MashupAPI.loadablePlugins.include( value.name )
})
// Add those plugins to the list, which are only in the loadablePlugins list
ORYX.MashupAPI.loadablePlugins.each(function( className ){
if( !(ORYX.availablePlugins.find(function(val){ return val.name == className }))){
ORYX.availablePlugins.push( {name: className } );
}
})
}
ORYX.availablePlugins.each(function(value) {
ORYX.Log.debug("Initializing plugin '%0'", value.name);
if( (!value.requires || !value.requires.namespaces || value.requires.namespaces.any(function(req){ return loadedStencilSetsNamespaces.indexOf(req) >= 0 }) ) &&
(!value.notUsesIn || !value.notUsesIn.namespaces || !value.notUsesIn.namespaces.any(function(req){ return loadedStencilSetsNamespaces.indexOf(req) >= 0 }) )&&
/*only load activated plugins or undefined */
(value.engaged || (value.engaged===undefined)) ){
try {
var className = eval(value.name);
if( className ){
var plugin = new className(facade, value);
plugin.type = value.name;
newPlugins.push( plugin );
plugin.engaged=true;
}
} catch(e) {
ORYX.Log.warn("Plugin %0 is not available", value.name);
}
} else {
ORYX.Log.info("Plugin need a stencilset which is not loaded'", value.name);
}
});
newPlugins.each(function(value) {
// If it is a GUI plugin, it receives all plugin offer meta data
if(value.registryChanged)
value.registryChanged(me.pluginsData);
// If it has an onSelectionChanged method, it is registered with the editor event handler
if(value.onSelectionChanged)
me.registerOnEvent(ORYX.CONFIG.EVENT_SELECTION_CHANGED, value.onSelectionChanged.bind(value));
});
this.loadedPlugins = newPlugins;
// Hack for the Scrollbars
if(Ext.isMac) {
ORYX.Editor.resizeFix();
}
this.registerPluginsOnKeyEvents();
this.setSelection();
},
/**
* Creates the Canvas
* @param {String} [stencilType] The stencil type used for creating the canvas. If not given, a stencil with mayBeRoot = true from the current stencil set is taken.
* @param {Object} [canvasConfig] Any canvas properties (like language).
*/
_createCanvas: function(stencilType, canvasConfig) {
if (stencilType) {
// Add namespace to stencilType
if (stencilType.search(/^http/) === -1) {
stencilType = this.getStencilSets().values()[0].namespace() + stencilType;
}
}
else {
// Get any root stencil type
stencilType = this.getStencilSets().values()[0].findRootStencilName();
}
// get the stencil associated with the type
var canvasStencil = ORYX.Core.StencilSet.stencil(stencilType);
if (!canvasStencil)
ORYX.Log.fatal("Initialisation failed, because the stencil with the type %0 is not part of one of the loaded stencil sets.", stencilType);
// create all dom
// TODO fix border, so the visible canvas has a double border and some spacing to the scrollbars
var div = ORYX.Editor.graft("http://www.w3.org/1999/xhtml", null, ['div']);
// set class for custom styling
div.addClassName("ORYX_Editor");
// create the canvas
this._canvas = new ORYX.Core.Canvas({
width : ORYX.CONFIG.CANVAS_WIDTH,
height : ORYX.CONFIG.CANVAS_HEIGHT,
'eventHandlerCallback' : this.handleEvents.bind(this),
id : this.id,
parentNode : div
}, canvasStencil);
if (canvasConfig) {
// Migrate canvasConfig to an RDF-like structure
//FIXME this isn't nice at all because we don't want rdf any longer
var properties = [];
for(field in canvasConfig){
properties.push({
prefix: 'oryx',
name: field,
value: canvasConfig[field]
});
}
this._canvas.deserialize(properties);
}
},
/**
* Returns a per-editor singleton plugin facade.
* To be used in plugin initialization.
*/
_getPluginFacade: function() {
// if there is no pluginfacade already created:
if(!(this._pluginFacade))
// create it.
this._pluginFacade = {
activatePluginByName: this.activatePluginByName.bind(this),
//deactivatePluginByName: this.deactivatePluginByName.bind(this),
getAvailablePlugins: this.getAvailablePlugins.bind(this),
offer: this.offer.bind(this),
getStencilSets: this.getStencilSets.bind(this),
getRules: this.getRules.bind(this),
loadStencilSet: this.loadStencilSet.bind(this),
createShape: this.createShape.bind(this),
deleteShape: this.deleteShape.bind(this),
getSelection: this.getSelection.bind(this),
setSelection: this.setSelection.bind(this),
updateSelection: this.updateSelection.bind(this),
getCanvas: this.getCanvas.bind(this),
importJSON: this.importJSON.bind(this),
importERDF: this.importERDF.bind(this),
getERDF: this.getERDF.bind(this),
getJSON: this.getJSON.bind(this),
getSerializedJSON: this.getSerializedJSON.bind(this),
checkParsingErrors: this.checkParsingErrors.bind(this),
showParsingErrors: this.showParsingErrors.bind(this),
executeCommands: this.executeCommands.bind(this),
registerOnEvent: this.registerOnEvent.bind(this),
unregisterOnEvent: this.unregisterOnEvent.bind(this),
raiseEvent: this.handleEvents.bind(this),
enableEvent: this.enableEvent.bind(this),
disableEvent: this.disableEvent.bind(this),
eventCoordinates: this.eventCoordinates.bind(this),
addToRegion: this.addToRegion.bind(this),
getModelMetaData: this.getModelMetaData.bind(this)
};
// return it.
return this._pluginFacade;
},
/**
* Implements the command pattern
* (The real usage of the command pattern
* is implemented and shown in the Plugins/undo.js)
*
* @param <Oryx.Core.Command>[] Array of commands
*/
executeCommands: function(commands){
// Check if the argument is an array and the elements are from command-class
if ( commands instanceof Array &&
commands.length > 0 &&
commands.all(function(command){ return command instanceof ORYX.Core.Command }) ) {
// Raise event for executing commands
this.handleEvents({
type : ORYX.CONFIG.EVENT_EXECUTE_COMMANDS,
commands : commands
});
var ret;
// Execute every command
commands.each(function(command){
ret = command.execute();
})
return ret;
}
},
/**
* Returns JSON of underlying canvas (calls ORYX.Canvas#toJSON()).
* @return {Object} Returns JSON representation as JSON object.
*/
getJSON: function(){
var canvas = this.getCanvas().toJSON();
canvas.ssextensions = this.getStencilSets().values()[0].extensions().keys();
return canvas;
},
/**
* Serializes a call to toJSON().
* @return {String} Returns JSON representation as string.
*/
getSerializedJSON: function() {
return Ext.encode(this.getJSON());
},
checkParsingErrors : function() {
var processJSON = ORYX.EDITOR.getSerializedJSON();
var ajaxObj = new XMLHttpRequest;
var url = ORYX.PATH + "uuidRepository";
var params = "action=checkErrors&pp=" + ORYX.PREPROCESSING + "&profile=" + ORYX.PROFILE + "&data=" + encodeURIComponent(processJSON);
ajaxObj.open("POST",url,false);
ajaxObj.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
ajaxObj.send(params);
if (ajaxObj.status == 200) {
if(ajaxObj.responseText == "true") {
return "true";
} else {
var formattedSvgDOM = DataManager.serialize(ORYX.EDITOR.getCanvas().getSVGRepresentation(false));
var rawSvgDOM = DataManager.serialize(ORYX.EDITOR.getCanvas().getRootNode().cloneNode(true));
var processJSON = ORYX.EDITOR.getSerializedJSON();
var processId = jsonPath(processJSON.evalJSON(), "$.properties.id");
Ext.Ajax.request({
url: ORYX.PATH + "transformer",
method: 'POST',
success: function(request) {
// be quiet
},
failure: function(){
Ext.Msg.minWidth = 400;
this.facade.raiseEvent({
type : ORYX.CONFIG.EVENT_NOTIFICATION_SHOW,
ntype : 'error',
msg : ORYX.I18N.main.failSave,
title : ''
});
},
params: {
fsvg: formattedSvgDOM,
rsvg: rawSvgDOM,
uuid: ORYX.UUID,
profile: ORYX.PROFILE,
transformto: 'svg',
processid: processId
}
});
return "false";
}
} else {
return "true";
}
},
showParsingErrors : function() {
Ext.Msg.minWidth = 360;
Ext.MessageBox.alert( ORYX.I18N.main.unableUserAction );
},
/**
* @return {String} Returns eRDF representation.
* @deprecated Use ORYX.Editor#getJSON instead, if possible.
*/
getERDF:function(){
// Get the serialized dom
var serializedDOM = DataManager.serializeDOM( this._getPluginFacade() );
// Add xml definition if there is no
serializedDOM = '<?xml version="1.0" encoding="utf-8"?>' +
'<html xmlns="http://www.w3.org/1999/xhtml" ' +
'xmlns:b3mn="http://b3mn.org/2007/b3mn" ' +
'xmlns:ext="http://b3mn.org/2007/ext" ' +
'xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" ' +
'xmlns:atom="http://b3mn.org/2007/atom+xhtml">' +
'<head profile="http://purl.org/NET/erdf/profile">' +
'<link rel="schema.dc" href="http://purl.org/dc/elements/1.1/" />' +
'<link rel="schema.dcTerms" href="http://purl.org/dc/terms/ " />' +
'<link rel="schema.b3mn" href="http://b3mn.org" />' +
'<link rel="schema.oryx" href="http://oryx-editor.org/" />' +
'<link rel="schema.raziel" href="http://raziel.org/" />' +
'<base href="' +
location.href.split("?")[0] +
'" />' +
'</head><body>' +
serializedDOM +
'</body></html>';
return serializedDOM;
},
/**
* Imports shapes in JSON as expected by {@link ORYX.Editor#loadSerialized}
* @param {Object|String} jsonObject The (serialized) json object to be imported
* @param {boolean } [noSelectionAfterImport=false] Set to true if no shapes should be selected after import
* @throws {SyntaxError} If the serialized json object contains syntax errors
*/
importJSON: function(jsonObject, noSelectionAfterImport) {
try {
jsonObject = this.renewResourceIds(jsonObject);
} catch(error){
throw error;
}
//check, if the imported json model can be loaded in this editor
// (stencil set has to fit)
if (!jsonObject.stencilset) {
this.facade.raiseEvent({
type : ORYX.CONFIG.EVENT_NOTIFICATION_SHOW,
ntype : 'error',
msg : ORYX.I18N.JSONImport.invalidJSON,
title : ORYX.I18N.JSONImport.title
});
return null;
}
if(jsonObject.stencilset.namespace && jsonObject.stencilset.namespace !== this.getCanvas().getStencil().stencilSet().namespace()) {
this.facade.raiseEvent({
type : ORYX.CONFIG.EVENT_NOTIFICATION_SHOW,
ntype : 'error',
msg : String.format(ORYX.I18N.JSONImport.wrongSS, jsonObject.stencilset.namespace, this.getCanvas().getStencil().stencilSet().namespace()),
title : ORYX.I18N.JSONImport.title
});
return null;
} else {
var commandClass = ORYX.Core.Command.extend({
construct: function(jsonObject, loadSerializedCB, noSelectionAfterImport, facade){
this.jsonObject = jsonObject;
this.noSelection = noSelectionAfterImport;
this.facade = facade;
this.shapes;
this.connections = [];
this.parents = new Hash();
this.selection = this.facade.getSelection();
this.loadSerialized = loadSerializedCB;
},
execute: function(){
if (!this.shapes) {
// Import the shapes out of the serialization
this.shapes = this.loadSerialized( this.jsonObject );
//store all connections
this.shapes.each(function(shape) {
if (shape.getDockers) {
var dockers = shape.getDockers();
if (dockers) {
if (dockers.length > 0) {
this.connections.push([dockers.first(), dockers.first().getDockedShape(), dockers.first().referencePoint]);
}
if (dockers.length > 1) {
this.connections.push([dockers.last(), dockers.last().getDockedShape(), dockers.last().referencePoint]);
}
}
}
//store parents
this.parents[shape.id] = shape.parent;
}.bind(this));
} else {
this.shapes.each(function(shape) {
this.parents[shape.id].add(shape);
}.bind(this));
this.connections.each(function(con) {
con[0].setDockedShape(con[1]);
con[0].setReferencePoint(con[2]);
//con[0].update();
});
}
//this.parents.values().uniq().invoke("update");
this.facade.getCanvas().update();
if(!this.noSelection)
this.facade.setSelection(this.shapes);
else
this.facade.updateSelection();
},
rollback: function(){
var selection = this.facade.getSelection();
this.shapes.each(function(shape) {
selection = selection.without(shape);
this.facade.deleteShape(shape);
}.bind(this));
/*this.parents.values().uniq().each(function(parent) {
if(!this.shapes.member(parent))
parent.update();
}.bind(this));*/
this.facade.getCanvas().update();
this.facade.setSelection(selection);
}
})
var command = new commandClass(jsonObject,
this.loadSerialized.bind(this),
noSelectionAfterImport,
this._getPluginFacade());
this.executeCommands([command]);
return command.shapes.clone();
}
},
/**
* This method renews all resource ids and corresponding references.
* Warning: The implementation performs a textual substitution on the serialized
* object for ease of implementation. This results in low performance, which is
* acceptable since this is only used when importing models.
* @param {Object|String} jsonObject
* @throws {SyntaxError} If the serialized json object contains syntax errors.
* @return {Object} The jsonObject with renewed ids.
* @private
*/
renewResourceIds: function(jsonObject){
// For renewing resource ids, a serialized and object version is needed
if(Ext.type(jsonObject) === "string"){
try {
var serJsonObject = jsonObject;
jsonObject = Ext.decode(jsonObject);
} catch(error){
throw new SyntaxError(error.message);
}
} else {
var serJsonObject = Ext.encode(jsonObject);
}
// collect all resourceIds recursively
var collectResourceIds = function(shapes){
if(!shapes) return [];
return shapes.map(function(shape){
return collectResourceIds(shape.childShapes).concat(shape.resourceId);
}).flatten();
}
var resourceIds = collectResourceIds(jsonObject.childShapes);
// Replace each resource id by a new one
resourceIds.each(function(oldResourceId) {
var newResourceId = ORYX.Editor.provideId();
serJsonObject = serJsonObject.gsub('"'+oldResourceId+'"', '"'+newResourceId+'"')
});
return Ext.decode(serJsonObject);
},
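// Sketch of the substitution above (ids hypothetical): every occurrence of
// '"oldResourceId"' in the serialized JSON is replaced by '"resource42"',
// so shape definitions and all references to them are renewed consistently
// in one pass.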
/**
* Import erdf structure to the editor
*
*/
importERDF: function( erdfDOM ){
var serialized = this.parseToSerializeObjects( erdfDOM );
if(serialized)
return this.importJSON(serialized, true);
},
/**
* Parses one model (eRDF) to the serialized form (JSON)
*
* @param {Object} oneProcessData
* @return {Object} The JSON form of given eRDF model, or null if it couldn't be extracted
*/
parseToSerializeObjects: function( oneProcessData ){
// Firefox splits a long text node into chunks of 4096 characters.
// To prevent truncation of long property values, the normalize method must be called
if(oneProcessData.normalize) oneProcessData.normalize();
try {
var xsl = "";
var source=ORYX.PATH + "lib/extract-rdf.xsl";
new Ajax.Request(source, {
asynchronous: false,
method: 'get',
onSuccess: function(transport){
xsl = transport.responseText
}.bind(this),
onFailure: (function(transport){
ORYX.Log.error("XSL load failed" + transport);
}).bind(this)
});
var domParser = new DOMParser();
var xmlObject = oneProcessData;
var xslObject = domParser.parseFromString(xsl, "text/xml");
var xsltProcessor = new XSLTProcessor();
var xslRef = document.implementation.createDocument("", "", null);
xsltProcessor.importStylesheet(xslObject);
var new_rdf = xsltProcessor.transformToFragment(xmlObject, document);
var serialized_rdf = (new XMLSerializer()).serializeToString(new_rdf);
} catch(e){
this.facade.raiseEvent({
type : ORYX.CONFIG.EVENT_NOTIFICATION_SHOW,
ntype : 'error',
msg : ORYX.I18N.BPELSupport.error+': ' + e,
title : ''
});
var serialized_rdf = "";
}
// Firefox 2 to 3 problem?!
serialized_rdf = !serialized_rdf.startsWith("<?xml") ? "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + serialized_rdf : serialized_rdf;
var req = new Ajax.Request(ORYX.CONFIG.ROOT_PATH+"rdf2json", {
method: 'POST',
asynchronous: false,
onSuccess: function(transport) {
Ext.decode(transport.responseText);
},
parameters: {
rdf: serialized_rdf
}
});
return Ext.decode(req.transport.responseText);
},
/**
* Loads a serialized model into the editor.
* @example
* editor.loadSerialized({
* resourceId: "mymodel1",
* childShapes: [
* {
* stencil:{ id:"Subprocess" },
* outgoing:[{resourceId: 'aShape'}],
* target: {resourceId: 'aShape'},
* bounds:{ lowerRight:{ y:510, x:633 }, upperLeft:{ y:146, x:210 } },
* resourceId: "myshape1",
* childShapes:[],
* properties:{},
* }
* ],
* properties:{
* language: "English"
* },
* stencilset:{
* url:"http://localhost:8080/oryx/stencilsets/bpmn1.1/bpmn1.1.json"
* },
* stencil:{
* id:"BPMNDiagram"
* }
* });
* @param {Object} model Description of the model to load.
* @param {Array} [model.ssextensions] List of stencil set extensions.
* @param {String} model.stencilset.url
* @param {String} model.stencil.id
* @param {Array} model.childShapes
* @param {Array} [model.properties]
* @param {String} model.resourceId
* @return {ORYX.Core.Shape[]} List of created shapes
* @methodOf ORYX.Editor.prototype
*/
loadSerialized: function( model ){
var canvas = this.getCanvas();
// Bugfix (cf. http://code.google.com/p/oryx-editor/issues/detail?id=240)
// Deserialize the canvas' stencil set extensions properties first!
this.loadSSExtensions(model.ssextensions);
var shapes = this.getCanvas().addShapeObjects(model.childShapes, this.handleEvents.bind(this));
if(model.properties) {
for(var key in model.properties) {
var prop = model.properties[key];
if (!(typeof prop === "string")) {
prop = Ext.encode(prop);
}
this.getCanvas().setProperty("oryx-" + key, prop);
}
}
this.getCanvas().updateSize();
return shapes;
},
/**
* Calls ORYX.Editor.prototype.loadSSExtension for each given namespace
* @param {Array} ss_extension_namespaces An array of stencil set extension namespaces.
*/
loadSSExtensions: function(ss_extension_namespaces){
if(!ss_extension_namespaces) return;
ss_extension_namespaces.each(function(ss_extension_namespace){
this.loadSSExtension(ss_extension_namespace);
}.bind(this));
},
/**
* Loads a stencil set extension.
* The stencil set extension's definition file must already
* be loaded when the editor is initialized.
*/
loadSSExtension: function(extension) {
if (!extension) {
return;
}
var stencilset = this.getStencilSets()[extension["extends"]];
if (!stencilset) {
return;
}
stencilset.addExtension(extension);
//stencilset.addExtension("/oryx/build/stencilsets/extensions/" + extension["definition"])
this.getRules().initializeRules(stencilset);
this._getPluginFacade().raiseEvent({
type: ORYX.CONFIG.EVENT_STENCIL_SET_LOADED
});
},
disableEvent: function(eventType){
if(eventType == ORYX.CONFIG.EVENT_KEYDOWN) {
this._keydownEnabled = false;
}
if(eventType == ORYX.CONFIG.EVENT_KEYUP) {
this._keyupEnabled = false;
}
if(this.DOMEventListeners.keys().member(eventType)) {
var value = this.DOMEventListeners.remove(eventType);
this.DOMEventListeners['disable_' + eventType] = value;
}
},
enableEvent: function(eventType){
if(eventType == ORYX.CONFIG.EVENT_KEYDOWN) {
this._keydownEnabled = true;
}
if(eventType == ORYX.CONFIG.EVENT_KEYUP) {
this._keyupEnabled = true;
}
if(this.DOMEventListeners.keys().member("disable_" + eventType)) {
var value = this.DOMEventListeners.remove("disable_" + eventType);
this.DOMEventListeners[eventType] = value;
}
},
/**
* Methods for the PluginFacade
*/
registerOnEvent: function(eventType, callback) {
if(!(this.DOMEventListeners.keys().member(eventType))) {
this.DOMEventListeners[eventType] = [];
}
this.DOMEventListeners[eventType].push(callback);
},
unregisterOnEvent: function(eventType, callback) {
if(this.DOMEventListeners.keys().member(eventType)) {
this.DOMEventListeners[eventType] = this.DOMEventListeners[eventType].without(callback);
} else {
// Event is not supported
// TODO: Error Handling
}
},
getSelection: function() {
return this.selection;
},
getStencilSets: function() {
return ORYX.Core.StencilSet.stencilSets(this.id);
},
getRules: function() {
return ORYX.Core.StencilSet.rules(this.id);
},
loadStencilSet: function(source) {
try {
ORYX.Core.StencilSet.loadStencilSet(source, this.id);
this.handleEvents({type:ORYX.CONFIG.EVENT_STENCIL_SET_LOADED});
} catch (e) {
ORYX.Log.warn("Requesting stencil set file failed. (" + e + ")");
}
},
offer: function(pluginData) {
if(!this.pluginsData.member(pluginData)){
this.pluginsData.push(pluginData);
}
},
/**
* Creates a new event, or adds the callback if the event already exists,
* for the key combination that the plugin passes in the keyCodes attribute
* of the offer method.
*
* The new key down event fits the schema:
* key.event[.metactrl][.alt][.shift].'thekeyCode'
*/
registerPluginsOnKeyEvents: function() {
this.pluginsData.each(function(pluginData) {
if(pluginData.keyCodes) {
pluginData.keyCodes.each(function(keyComb) {
var eventName = "key.event";
/* Include key action */
eventName += '.' + keyComb.keyAction;
if(keyComb.metaKeys) {
/* Register on ctrl or apple meta key as meta key */
if(keyComb.metaKeys.
indexOf(ORYX.CONFIG.META_KEY_META_CTRL) > -1) {
eventName += "." + ORYX.CONFIG.META_KEY_META_CTRL;
}
/* Register on alt key as meta key */
if(keyComb.metaKeys.
indexOf(ORYX.CONFIG.META_KEY_ALT) > -1) {
eventName += '.' + ORYX.CONFIG.META_KEY_ALT;
}
/* Register on shift key as meta key */
if(keyComb.metaKeys.
indexOf(ORYX.CONFIG.META_KEY_SHIFT) > -1) {
eventName += '.' + ORYX.CONFIG.META_KEY_SHIFT;
}
}
/* Register on the actual key */
if(keyComb.keyCode) {
eventName += '.' + keyComb.keyCode;
}
/* Register the event */
ORYX.Log.debug("Register Plugin on Key Event: %0", eventName);
this.registerOnEvent(eventName,pluginData.functionality);
}.bind(this));
}
}.bind(this));
},
setSelection: function(elements, subSelectionElement, force) {
if (!elements) { elements = [] }
elements = elements.compact().findAll(function(n){ return n instanceof ORYX.Core.Shape });
if (elements.first() instanceof ORYX.Core.Canvas) {
elements = [];
}
if (!force && elements.length === this.selection.length && this.selection.all(function(r){ return elements.include(r) })){
return;
}
this.selection = elements;
this._subSelection = subSelectionElement;
this.handleEvents({type:ORYX.CONFIG.EVENT_SELECTION_CHANGED, elements:elements, subSelection: subSelectionElement})
},
updateSelection: function() {
this.setSelection(this.selection, this._subSelection, true);
/*var s = this.selection;
this.setSelection();
this.setSelection(s);*/
},
getCanvas: function() {
return this._canvas;
},
/**
* option = {
* type: string,
* position: {x:int, y:int},
* connectingType: uiObj-Class,
* connectedShape: uiObj,
* dragging: bool,
* namespace: url,
* parent: ORYX.Core.AbstractShape,
* template: a template shape that the newly created shape inherits properties from.
* }
*/
createShape: function(option) {
if(option && option.serialize && option.serialize instanceof Array){
var type = option.serialize.find(function(obj){return (obj.prefix+"-"+obj.name) == "oryx-type"});
var stencil = ORYX.Core.StencilSet.stencil(type.value);
if(stencil.type() == 'node'){
var newShapeObject = new ORYX.Core.Node({'eventHandlerCallback':this.handleEvents.bind(this)}, stencil);
} else {
var newShapeObject = new ORYX.Core.Edge({'eventHandlerCallback':this.handleEvents.bind(this)}, stencil);
}
this.getCanvas().add(newShapeObject);
newShapeObject.deserialize(option.serialize);
return newShapeObject;
}
// If there is no argument, throw an exception
if(!option || !option.type || !option.namespace) { throw "To create a new shape you have to give an argument with type and namespace";}
var canvas = this.getCanvas();
var newShapeObject;
// Get the shape type
var shapetype = option.type;
// Get the stencil set
var sset = ORYX.Core.StencilSet.stencilSet(option.namespace);
// Create a new shape, depending on whether the stencil is an edge or a node
if(sset.stencil(shapetype).type() == "node") {
newShapeObject = new ORYX.Core.Node({'eventHandlerCallback':this.handleEvents.bind(this)}, sset.stencil(shapetype))
} else {
newShapeObject = new ORYX.Core.Edge({'eventHandlerCallback':this.handleEvents.bind(this)}, sset.stencil(shapetype))
}
// when there is a template, inherit the properties.
if(option.template) {
newShapeObject._jsonStencil.properties = option.template._jsonStencil.properties;
newShapeObject.postProcessProperties();
}
// Add to the canvas
if(option.parent && newShapeObject instanceof ORYX.Core.Node) {
option.parent.add(newShapeObject);
} else {
canvas.add(newShapeObject);
}
// Set the position
var point = option.position ? option.position : {x:100, y:200};
var con;
// If a connecting type and a connected shape are given in the argument and
// the newly created shape itself is not an edge,
if(option.connectingType && option.connectedShape && !(newShapeObject instanceof ORYX.Core.Edge)) {
// create a new edge
con = new ORYX.Core.Edge({'eventHandlerCallback':this.handleEvents.bind(this)}, sset.stencil(option.connectingType));
// and dock both of its end dockers to the two shapes
con.dockers.first().setDockedShape(option.connectedShape);
var magnet = option.connectedShape.getDefaultMagnet();
var cPoint = magnet ? magnet.bounds.center() : option.connectedShape.bounds.midPoint();
con.dockers.first().setReferencePoint( cPoint );
con.dockers.last().setDockedShape(newShapeObject);
con.dockers.last().setReferencePoint(newShapeObject.getDefaultMagnet().bounds.center());
// The Edge will be added to the canvas and be updated
canvas.add(con);
//con.update();
}
// Move the new Shape to the position
if(newShapeObject instanceof ORYX.Core.Edge && option.connectedShape) {
newShapeObject.dockers.first().setDockedShape(option.connectedShape);
if( option.connectedShape instanceof ORYX.Core.Node ){
newShapeObject.dockers.first().setReferencePoint(option.connectedShape.getDefaultMagnet().bounds.center());
newShapeObject.dockers.last().bounds.centerMoveTo(point);
} else {
newShapeObject.dockers.first().setReferencePoint(option.connectedShape.bounds.midPoint());
}
} else {
var b = newShapeObject.bounds
if( newShapeObject instanceof ORYX.Core.Node && newShapeObject.dockers.length == 1){
b = newShapeObject.dockers.first().bounds
}
b.centerMoveTo(point);
var upL = b.upperLeft();
b.moveBy( -Math.min(upL.x, 0) , -Math.min(upL.y, 0) )
var lwR = b.lowerRight();
b.moveBy( -Math.max(lwR.x-canvas.bounds.width(), 0) , -Math.max(lwR.y-canvas.bounds.height(), 0) )
}
// Update the shape
if (newShapeObject instanceof ORYX.Core.Edge) {
newShapeObject._update(false);
}
// And refresh the selection
if(!(newShapeObject instanceof ORYX.Core.Edge)) {
this.setSelection([newShapeObject]);
}
if(con && con.alignDockers) {
con.alignDockers();
}
if(newShapeObject.alignDockers) {
newShapeObject.alignDockers();
}
this._getPluginFacade().raiseEvent({
type : ORYX.CONFIG.EVENT_SHAPE_CREATED,
value : newShapeObject
});
return newShapeObject;
},
deleteShape: function(shape) {
if (!shape || !shape.parent){ return }
//remove shape from parent
// this also removes it from DOM
shape.parent.remove(shape);
//delete references to outgoing edges
shape.getOutgoingShapes().each(function(os) {
var docker = os.getDockers().first();
if(docker && docker.getDockedShape() == shape) {
docker.setDockedShape(undefined);
}
}.bind(this));
//delete references to incoming edges
shape.getIncomingShapes().each(function(is) {
var docker = is.getDockers().last();
if(docker && docker.getDockedShape() == shape) {
docker.setDockedShape(undefined);
}
}.bind(this));
//delete references of the shape's dockers
shape.getDockers().each(function(docker) {
docker.setDockedShape(undefined);
});
this._getPluginFacade().raiseEvent({
type : ORYX.CONFIG.EVENT_SHAPE_DELETED,
value : shape
});
},
/**
* Returns an object with meta data about the model,
* such as name, description, ...
*
* Empty object with the current backend.
*
* @return {Object} Meta data about the model
*/
getModelMetaData: function() {
return this.modelMetaData;
},
/* Event-Handler Methods */
/**
* Helper method to execute an event immediately. The event is not
* scheduled in the _eventsQueue. Needed to handle Layout-Callbacks.
*/
_executeEventImmediately: function(eventObj) {
if(this.DOMEventListeners.keys().member(eventObj.event.type)) {
this.DOMEventListeners[eventObj.event.type].each((function(value) {
value(eventObj.event, eventObj.arg);
}).bind(this));
}
},
_executeEvents: function() {
this._queueRunning = true;
while(this._eventsQueue.length > 0) {
var val = this._eventsQueue.shift();
this._executeEventImmediately(val);
}
this._queueRunning = false;
},
/**
* Forwards events to the editor-specific event handler methods
* @param {Object} event The event that was fired
* @param {Object} uiObj The target UI object
*/
handleEvents: function(event, uiObj) {
ORYX.Log.trace("Dispatching event type %0 on %1", event.type, uiObj);
switch(event.type) {
case ORYX.CONFIG.EVENT_MOUSEDOWN:
this._handleMouseDown(event, uiObj);
break;
case ORYX.CONFIG.EVENT_MOUSEMOVE:
this._handleMouseMove(event, uiObj);
break;
case ORYX.CONFIG.EVENT_MOUSEUP:
this._handleMouseUp(event, uiObj);
break;
case ORYX.CONFIG.EVENT_MOUSEOVER:
this._handleMouseHover(event, uiObj);
break;
case ORYX.CONFIG.EVENT_MOUSEOUT:
this._handleMouseOut(event, uiObj);
break;
}
/* Force execution if necessary. Used while handling layout callbacks. */
if(event.forceExecution) {
this._executeEventImmediately({event: event, arg: uiObj});
} else {
this._eventsQueue.push({event: event, arg: uiObj});
}
if(!this._queueRunning) {
this._executeEvents();
}
// TODO: Make this return whether no listener returned false,
// so that bubbling can be suppressed when it is undesirable.
return false;
},
catchKeyUpEvents: function(event) {
if(!this._keyupEnabled) {
return;
}
/* assure we have the current event. */
if (!event)
event = window.event;
// Checks if the event comes from some input field
if ( ["INPUT", "TEXTAREA"].include(event.target.tagName.toUpperCase()) ){
return;
}
/* Create key up event type */
var keyUpEvent = this.createKeyCombEvent(event, ORYX.CONFIG.KEY_ACTION_UP);
ORYX.Log.debug("Key Event to handle: %0", keyUpEvent);
/* forward to dispatching. */
this.handleEvents({type: keyUpEvent, event:event});
},
/**
* Catches all key down events and forwards the appropriate event to
* dispatching, according to the pressed keys.
*
* @param {Event}
* The key down event to handle
*/
catchKeyDownEvents: function(event) {
if(!this._keydownEnabled) {
return;
}
/* Assure we have the current event. */
if (!event)
event = window.event;
/* Fixed in FF3 */
// This is a mac-specific fix. The mozilla event object has no knowledge
// of meta key modifier on osx, however, it is needed for certain
// shortcuts. This fix adds the metaKey field to the event object, so
// that all listeners that registered per Oryx plugin facade profit from
// this. The original bug is filed in
// https://bugzilla.mozilla.org/show_bug.cgi?id=418334
//if (this.__currentKey == ORYX.CONFIG.KEY_CODE_META) {
// event.appleMetaKey = true;
//}
//this.__currentKey = pressedKey;
// Checks if the event comes from some input field
if ( ["INPUT", "TEXTAREA"].include(event.target.tagName.toUpperCase()) ){
return;
}
/* Create key down event type */
var keyDownEvent = this.createKeyCombEvent(event, ORYX.CONFIG.KEY_ACTION_DOWN);
ORYX.Log.debug("Key Event to handle: %0", keyDownEvent);
/* Forward to dispatching. */
this.handleEvents({type: keyDownEvent,event: event});
},
/**
* Creates the event type name according to the pressed keys.
*
* @param {Event} keyEvent The source key event from which the event name is built
* @param {String} keyAction The key action (ORYX.CONFIG.KEY_ACTION_UP or ORYX.CONFIG.KEY_ACTION_DOWN)
*/
createKeyCombEvent: function(keyEvent, keyAction) {
/* Get the currently pressed key code. */
var pressedKey = keyEvent.which || keyEvent.keyCode;
//this.__currentKey = pressedKey;
/* Event name */
var eventName = "key.event";
/* Key action */
if(keyAction) {
eventName += "." + keyAction;
}
/* Ctrl or apple meta key is pressed */
if(keyEvent.ctrlKey || keyEvent.metaKey) {
eventName += "." + ORYX.CONFIG.META_KEY_META_CTRL;
}
/* Alt key is pressed */
if(keyEvent.altKey) {
eventName += "." + ORYX.CONFIG.META_KEY_ALT;
}
/* Shift key is pressed */
if(keyEvent.shiftKey) {
eventName += "." + ORYX.CONFIG.META_KEY_SHIFT;
}
/* Return the composed event name */
return eventName + "." + pressedKey;
},
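// Example (illustrative): with keyAction ORYX.CONFIG.KEY_ACTION_DOWN and
// Ctrl+Shift+Z pressed, the composed name is
// "key.event." + KEY_ACTION_DOWN + "." + META_KEY_META_CTRL + "." + META_KEY_SHIFT + ".90",
// where 90 is the key code of "z"; the concrete tokens depend on the
// ORYX.CONFIG constants of the build.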
_handleMouseDown: function(event, uiObj) {
// get canvas.
var canvas = this.getCanvas();
// Try to get the focus
canvas.focus()
// find the shape that is responsible for this element's id.
var element = event.currentTarget;
var elementController = uiObj;
// gather information on selection.
var currentIsSelectable = (elementController !== null) &&
(elementController !== undefined) && (elementController.isSelectable);
var currentIsMovable = (elementController !== null) &&
(elementController !== undefined) && (elementController.isMovable);
var modifierKeyPressed = event.shiftKey || event.ctrlKey;
var noObjectsSelected = this.selection.length === 0;
var currentIsSelected = this.selection.member(elementController);
// Rule #1: When there is nothing selected, select the clicked object.
if(currentIsSelectable && noObjectsSelected) {
this.setSelection([elementController]);
ORYX.Log.trace("Rule #1 applied for mouse down on %0", element.id);
// Rule #3: When at least one element is selected, and there is no
// modifier key pressed, and the clicked object is not selected, select
// the clicked object.
} else if(currentIsSelectable && !noObjectsSelected &&
!modifierKeyPressed && !currentIsSelected) {
this.setSelection([elementController]);
//var objectType = elementController.readAttributes();
//alert(objectType[0] + ": " + objectType[1]);
ORYX.Log.trace("Rule #3 applied for mouse down on %0", element.id);
// Rule #4: When a modifier key is pressed, and the current object is
// not selected, add it to the selection.
} else if(currentIsSelectable && modifierKeyPressed
&& !currentIsSelected) {
var newSelection = this.selection.clone();
newSelection.push(elementController)
this.setSelection(newSelection)
ORYX.Log.trace("Rule #4 applied for mouse down on %0", element.id);
// Rule #6: When a modifier key is pressed and the clicked object is already
// selected, remove it from the selection.
} else if(currentIsSelectable && currentIsSelected &&
modifierKeyPressed) {
var newSelection = this.selection.clone();
this.setSelection(newSelection.without(elementController))
ORYX.Log.trace("Rule #6 applied for mouse down on %0", elementController.id);
// Rule #5: When there is at least one object selected and no control
// key pressed, we're dragging.
/*} else if(currentIsSelectable && !noObjectsSelected
&& !modifierKeyPressed) {
if(this.log.isTraceEnabled())
this.log.trace("Rule #5 applied for mouse down on "+element.id);
*/
// Rule #2: When clicked on something that is neither
// selectable nor movable, clear the selection, and return.
} else if (!currentIsSelectable && !currentIsMovable) {
this.setSelection([]);
ORYX.Log.trace("Rule #2 applied for mouse down on %0", element.id);
return;
// Rule #7: When the current object is not selectable but movable,
// it is probably a control. Leave the selection unchanged but set
// the movedObject to the current one and enable Drag. Dockers will
// be processed in the dragDocker plugin.
} else if(!currentIsSelectable && currentIsMovable && !(elementController instanceof ORYX.Core.Controls.Docker)) {
// TODO: If there is any moveable elements, do this in a plugin
//ORYX.Core.UIEnableDrag(event, elementController);
ORYX.Log.trace("Rule #7 applied for mouse down on %0", element.id);
// Rule #8: When the element is selectable, currently selected, and no
// modifier key is pressed, toggle the sub-selection.
} else if(currentIsSelectable && currentIsSelected &&
!modifierKeyPressed) {
this._subSelection = this._subSelection != elementController ? elementController : undefined;
this.setSelection(this.selection, this._subSelection);
ORYX.Log.trace("Rule #8 applied for mouse down on %0", element.id);
}
// prevent event from bubbling, return.
//Event.stop(event);
return;
},
_handleMouseMove: function(event, uiObj) {
return;
},
_handleMouseUp: function(event, uiObj) {
// get canvas.
var canvas = this.getCanvas();
// find the shape that is responsible for this element's id.
var elementController = uiObj;
//get event position
var evPos = this.eventCoordinates(event);
//Event.stop(event);
},
_handleMouseHover: function(event, uiObj) {
return;
},
_handleMouseOut: function(event, uiObj) {
return;
},
/**
* Converts the event coordinates into SVG document coordinates.
* @param {Event} event
* @return {SVGPoint} The event coordinates in the SVG document
*/
eventCoordinates: function(event) {
var canvas = this.getCanvas();
var svgPoint = canvas.node.ownerSVGElement.createSVGPoint();
svgPoint.x = event.clientX;
svgPoint.y = event.clientY;
var matrix = canvas.node.getScreenCTM();
return svgPoint.matrixTransform(matrix.inverse());
}
};
ORYX.Editor = Clazz.extend(ORYX.Editor);
/**
* Creates a new ORYX.Editor instance by fetching a model from the given url and passing it to the constructor
* @param {String} modelUrl The JSON URL of a model.
* @param {Object} config Editor config passed to the constructor, merged with the response of the request to modelUrl
*/
ORYX.Editor.createByUrl = function(modelUrl, config){
if(!config) config = {};
new Ajax.Request(modelUrl, {
method: 'GET',
onSuccess: function(transport) {
var editorConfig = Ext.decode(transport.responseText);
editorConfig = Ext.applyIf(editorConfig, config);
new ORYX.Editor(editorConfig);
if ("function" == typeof(config.onSuccess)) {
config.onSuccess(transport);
}
}.bind(this),
onFailure: function(transport) {
if ("function" == typeof(config.onFailure)) {
config.onFailure(transport);
}
}.bind(this)
});
}
// TODO Implement namespace awareness on attribute level.
/**
* graft() function
* Originally by Sean M. Burke from interglacial.com, altered for usage with
* SVG and namespace (xmlns) support. Be sure you understand xmlns before
* using this function, as it creates all grafted elements in the xmlns
* provided by you and all elements' attributes in the default xmlns. If you
* need to graft elements in a certain xmlns and wish to assign attributes
* in both that and another xmlns, you will need to do stepwise grafting,
* adding non-default attributes yourself, or you'll have to enhance this
* function. For the latter, I would appreciate it: martin@apfelfabrik.de
* @param {Object} namespace The namespace in which
* elements should be grafted.
* @param {Object} parent The element that should contain the grafted
* structure after the function returned.
* @param {Object} t the grafting structure.
* @param {Object} doc the document in which grafting is performed.
*/
ORYX.Editor.graft = function(namespace, parent, t, doc) {
doc = (doc || (parent && parent.ownerDocument) || document);
var e;
if(t === undefined) {
throw "Can't graft an undefined value";
} else if(t.constructor == String) {
e = doc.createTextNode( t );
} else {
for(var i = 0; i < t.length; i++) {
if( i === 0 && t[i].constructor == String ) {
var snared;
snared = t[i].match( /^([a-z][a-z0-9]*)\.([^\s\.]+)$/i );
if( snared ) {
e = doc.createElementNS(namespace, snared[1] );
e.setAttributeNS(null, 'class', snared[2] );
continue;
}
snared = t[i].match( /^([a-z][a-z0-9]*)$/i );
if( snared ) {
e = doc.createElementNS(namespace, snared[1] ); // but no class
continue;
}
// Otherwise:
e = doc.createElementNS(namespace, "span" );
e.setAttributeNS(null, "class", "namelessFromLOL" ); // was setAttribute, which only takes (name, value)
}
if( t[i] === undefined ) {
throw "Can't graft an undefined value in a list!";
} else if( t[i].constructor == String || t[i].constructor == Array ) {
this.graft(namespace, e, t[i], doc );
} else if( t[i].constructor == Number ) {
this.graft(namespace, e, t[i].toString(), doc );
} else if( t[i].constructor == Object ) {
// hash's properties => element's attributes
for(var k in t[i]) { e.setAttributeNS(null, k, t[i][k] ); }
} else {
}
}
}
if(parent) {
parent.appendChild( e );
} else {
}
return e; // return the topmost created node
};
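// Example usage (illustrative; parentNode is assumed to be an existing SVG
// container). This builds <g class="highlight" opacity="0.5"> with a nested
// <text x="10" y="20">Hi</text> child and returns the created <g> element:
// var g = ORYX.Editor.graft("http://www.w3.org/2000/svg", parentNode,
//     ['g.highlight', {opacity: "0.5"},
//         ['text', {x: "10", y: "20"}, 'Hi']]);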
ORYX.Editor.provideId = function() {
var res = [], hex = '0123456789ABCDEF';
for (var i = 0; i < 36; i++) res[i] = Math.floor(Math.random()*0x10);
res[14] = 4;
res[19] = (res[19] & 0x3) | 0x8;
for (var i = 0; i < 36; i++) res[i] = hex[res[i]];
res[8] = res[13] = res[18] = res[23] = '-';
return "_" + res.join('');
};
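// provideId() builds an RFC 4122 style version 4 UUID prefixed with "_" so the
// result is usable as an XML id, e.g. "_1A2B3C4D-5E6F-4A7B-8C9D-0E1F2A3B4C5D"
// (example value; everything except the version digit "4" and the variant bits is random).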
/**
* When working with Ext, the window sometimes needs to be resized. To do
* so, use this class method. The resize is deferred by 100ms, and all subsequent
* resizeFix calls are ignored until the initially requested resize is
* performed.
*/
ORYX.Editor.resizeFix = function() {
if (!ORYX.Editor._resizeFixTimeout) {
ORYX.Editor._resizeFixTimeout = window.setTimeout(function() {
window.resizeBy(1,1);
window.resizeBy(-1,-1);
ORYX.Editor._resizeFixTimeout = null; // reset the guard so later calls can resize again
}, 100);
}
};
ORYX.Editor.Cookie = {
callbacks:[],
onChange: function( callback, interval ){
this.callbacks.push(callback);
this.start( interval )
},
start: function( interval ){
if( this.pe ){
return;
}
var currentString = document.cookie;
this.pe = new PeriodicalExecuter( function(){
if( currentString != document.cookie ){
currentString = document.cookie;
this.callbacks.each(function(callback){ callback(this.getParams()) }.bind(this));
}
}.bind(this), ( interval || 10000 ) / 1000);
},
stop: function(){
if( this.pe ){
this.pe.stop();
this.pe = null;
}
},
getParams: function(){
var res = {};
var p = document.cookie;
p.split("; ").each(function(param){ res[param.split("=")[0]] = param.split("=")[1];});
return res;
},
toString: function(){
return document.cookie;
}
};
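// Example usage (illustrative): poll document.cookie once per second and log
// the parsed key/value pairs whenever the cookie string changes.
// ORYX.Editor.Cookie.onChange(function(params){
//     ORYX.Log.debug("Cookie changed: %0", params);
// }, 1000);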
/**
* Workaround for Safari/WebKit: checking an element with
* instanceof against SVGSVGElement (and the other SVG element
* classes) raises an error there
*
*/
ORYX.Editor.SVGClassElementsAreAvailable = true;
ORYX.Editor.setMissingClasses = function() {
try {
SVGElement;
} catch(e) {
ORYX.Editor.SVGClassElementsAreAvailable = false;
SVGSVGElement = document.createElementNS('http://www.w3.org/2000/svg', 'svg').toString();
SVGGElement = document.createElementNS('http://www.w3.org/2000/svg', 'g').toString();
SVGPathElement = document.createElementNS('http://www.w3.org/2000/svg', 'path').toString();
SVGTextElement = document.createElementNS('http://www.w3.org/2000/svg', 'text').toString();
//SVGMarkerElement = document.createElementNS('http://www.w3.org/2000/svg', 'marker').toString();
SVGRectElement = document.createElementNS('http://www.w3.org/2000/svg', 'rect').toString();
SVGImageElement = document.createElementNS('http://www.w3.org/2000/svg', 'image').toString();
SVGCircleElement = document.createElementNS('http://www.w3.org/2000/svg', 'circle').toString();
SVGEllipseElement = document.createElementNS('http://www.w3.org/2000/svg', 'ellipse').toString();
SVGLineElement = document.createElementNS('http://www.w3.org/2000/svg', 'line').toString();
SVGPolylineElement = document.createElementNS('http://www.w3.org/2000/svg', 'polyline').toString();
SVGPolygonElement = document.createElementNS('http://www.w3.org/2000/svg', 'polygon').toString();
}
}
ORYX.Editor.checkIfSaved = function() {
if(ORYX.READONLY == true || ORYX.VIEWLOCKED == true) {
return true;
} else {
return ORYX.PROCESS_SAVED;
}
};
ORYX.Editor.checkClassType = function( classInst, classType ) {
if( ORYX.Editor.SVGClassElementsAreAvailable ){
return classInst instanceof classType
} else {
return classInst == classType
}
};
ORYX.Editor.makeExtModalWindowKeysave = function(facade) {
Ext.override(Ext.Window,{
beforeShow : function(){
delete this.el.lastXY;
delete this.el.lastLT;
if(this.x === undefined || this.y === undefined){
var xy = this.el.getAlignToXY(this.container, 'c-c');
var pos = this.el.translatePoints(xy[0], xy[1]);
this.x = this.x === undefined? pos.left : this.x;
this.y = this.y === undefined? pos.top : this.y;
}
this.el.setLeftTop(this.x, this.y);
if(this.expandOnShow){
this.expand(false);
}
if(this.modal){
facade.disableEvent(ORYX.CONFIG.EVENT_KEYDOWN);
Ext.getBody().addClass("x-body-masked");
this.mask.setSize(Ext.lib.Dom.getViewWidth(true), Ext.lib.Dom.getViewHeight(true));
this.mask.show();
}
},
afterHide : function(){
this.proxy.hide();
if(this.monitorResize || this.modal || this.constrain || this.constrainHeader){
Ext.EventManager.removeResizeListener(this.onWindowResize, this);
}
if(this.modal){
this.mask.hide();
facade.enableEvent(ORYX.CONFIG.EVENT_KEYDOWN);
Ext.getBody().removeClass("x-body-masked");
}
if(this.keyMap){
this.keyMap.disable();
}
this.fireEvent("hide", this);
},
beforeDestroy : function(){
if(this.modal)
facade.enableEvent(ORYX.CONFIG.EVENT_KEYDOWN);
Ext.destroy(
this.resizer,
this.dd,
this.proxy,
this.mask
);
Ext.Window.superclass.beforeDestroy.call(this);
}
});
}
| init |
TagForm.test.tsx | import * as React from "react";
import { mount, ReactWrapper } from "enzyme";
import TagForm from "../index";
const resetMessage = jest.fn();
const resetForm = jest.fn();
const cancelForm = jest.fn();
const submitAction = jest.fn();
const initialProps = {
_id: "",
resetMessage,
serverError: "",
serverMessage: "",
resetForm,
cancelForm,
submitAction
};
describe("TagForm", () => {
let wrapper: ReactWrapper;
beforeEach(() => {
wrapper = mount(<TagForm {...initialProps} />);
});
afterEach(() => {
resetMessage.mockClear();
resetForm.mockClear();
cancelForm.mockClear();
submitAction.mockClear();
});
it("renders without error ", () => {
expect(wrapper.find("form")).toExist();
});
it("calls the handleChange which updates a field", () => {
const value = "updated!";
const name = "tagName";
const inputNode = () => wrapper.find("[data-testid='tagName']").first();
inputNode().simulate("change", { target: { name, value } });
expect(inputNode()).toHaveProp("value", value);
});
// it("calls resetMessage when the form is unmounted", () => {
// wrapper.unmount();
// expect(resetMessage).toHaveBeenCalledTimes(1);
// });
it("when a tag submits an empty form, the form displays errors", () => {
wrapper.find("form").simulate("submit");
expect(wrapper.find("[data-testid='errors']")).toHaveLength(2);
});
describe("with form data", () => {
beforeEach(() => {
["tagName", "category"].forEach(name => {
wrapper
.find(`[data-testid="${name}"]`)
.first()
.simulate("change", {
target: { name, value: "taggy" }
});
});
});
it("when the form is submitted, it calls submitAction with form values and an id when there is no errors", () => {
const value = "taggy";
const _id = "";
wrapper.find("form").simulate("submit");
expect(submitAction).toHaveBeenCalledWith({
_id,
tagName: value,
category: value
});
});
it("calls resetForm when the serverMessage", () => {
wrapper.find("form").simulate("submit");
wrapper.setProps({ serverMessage: "message" });
expect(resetForm).toHaveBeenCalledTimes(1);
});
it("when the form is submitted but a server error is thrown, then the form will not be submitting", () => {
const submitButton = () => wrapper.find("[data-testid='submit']");
wrapper.find("form").simulate("submit");
expect(submitButton()).toHaveProp("disabled", true);
wrapper.setProps({ serverError: "server" });
wrapper.update(); | });
}); | expect(submitButton()).toHaveProp("disabled", false);
}); |
kubelet_common.go | // Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2018 Datadog, Inc.
package kubelet
import (
"bufio"
"bytes"
"errors"
"fmt"
"strings"
)
var (
// ErrNotCompiled is returned if kubelet support is not compiled in.
// User classes should handle that case as gracefully as possible.
ErrNotCompiled = errors.New("kubelet support not compiled in")
// KubePodPrefix is the entity prefix for Kubernetes pods
KubePodPrefix = "kubernetes_pod://"
)
| return ""
}
return fmt.Sprintf("%s%s", KubePodPrefix, uid)
}
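// Example (illustrative): PodUIDToEntityName("1234-abcd") returns
// "kubernetes_pod://1234-abcd", while an empty uid yields "".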
// ParseMetricFromRaw parses a metric from raw prometheus text
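// Example (illustrative): given a payload that contains the line
// "kubelet_running_pod_count 12", ParseMetricFromRaw(payload, "kubelet_running_pod_count")
// returns that whole line; if no line contains the metric name, an error is returned.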
func ParseMetricFromRaw(raw []byte, metric string) (string, error) {
bytesReader := bytes.NewReader(raw)
scanner := bufio.NewScanner(bytesReader)
for scanner.Scan() {
line := scanner.Text()
// skip blank lines and comments; indexing [0] on an empty line would panic
if len(line) == 0 || line[0] == '#' {
continue
}
if strings.Contains(line, metric) {
return line, nil
}
}
return "", fmt.Errorf("%s metric not found in payload", metric)
} | // PodUIDToEntityName returns a prefixed entity name from a pod UID
func PodUIDToEntityName(uid string) string {
if uid == "" { |
solution.go | package main
import (
"bufio"
"bytes"
"fmt"
"os"
)
func | () {
reader := bufio.NewReader(os.Stdin)
tc := readNum(reader)
var buf bytes.Buffer
for tc > 0 {
tc--
n := readNum(reader)
S := readString(reader)
E := make([][]int, n-1)
for i := 0; i < n-1; i++ {
E[i] = readNNums(reader, 2)
}
q := readNum(reader)
Q := make([][]int, q)
for i := 0; i < q; i++ {
Q[i] = readNNums(reader, 2)
}
res := solve(n, S, E, Q)
for i := 0; i < q; i++ {
if res[i] {
buf.WriteString("YES\n")
} else {
buf.WriteString("NO\n")
}
}
}
fmt.Print(buf.String())
}
func readUint64(bytes []byte, from int, val *uint64) int {
i := from
var tmp uint64
for i < len(bytes) && bytes[i] >= '0' && bytes[i] <= '9' {
tmp = tmp*10 + uint64(bytes[i]-'0')
i++
}
*val = tmp
return i
}
func readInt(bytes []byte, from int, val *int) int {
i := from
sign := 1
if bytes[i] == '-' {
sign = -1
i++
}
tmp := 0
for i < len(bytes) && bytes[i] >= '0' && bytes[i] <= '9' {
tmp = tmp*10 + int(bytes[i]-'0')
i++
}
*val = tmp * sign
return i
}
func readString(reader *bufio.Reader) string {
s, _ := reader.ReadString('\n')
for i := 0; i < len(s); i++ {
if s[i] == '\n' {
return s[:i]
}
}
return s
}
func readNum(reader *bufio.Reader) (a int) {
bs, _ := reader.ReadBytes('\n')
readInt(bs, 0, &a)
return
}
func readTwoNums(reader *bufio.Reader) (a int, b int) {
res := readNNums(reader, 2)
a, b = res[0], res[1]
return
}
func readThreeNums(reader *bufio.Reader) (a int, b int, c int) {
res := readNNums(reader, 3)
a, b, c = res[0], res[1], res[2]
return
}
func readNNums(reader *bufio.Reader, n int) []int {
res := make([]int, n)
if n == 0 {
return res
}
x := 0
bs, _ := reader.ReadBytes('\n')
for i := 0; i < n; i++ {
for x < len(bs) && (bs[x] < '0' || bs[x] > '9') && bs[x] != '-' {
x++
}
x = readInt(bs, x, &res[i])
}
return res
}
func readFloat64(bytes []byte, from int, val *float64) int {
i := from
var sign float64 = 1
if bytes[i] == '-' {
sign = -1
i++
}
var real int64
for i < len(bytes) && bytes[i] >= '0' && bytes[i] <= '9' {
real = real*10 + int64(bytes[i]-'0')
i++
}
if i == len(bytes) || bytes[i] != '.' {
*val = float64(real)
return i
}
// bytes[i] == '.'
i++
var fraq float64
var base float64 = 0.1
for i < len(bytes) && bytes[i] >= '0' && bytes[i] <= '9' {
fraq += base * float64(bytes[i]-'0')
base /= 10
i++
}
*val = (float64(real) + fraq) * sign
return i
}
func readNFloats(reader *bufio.Reader, n int) []float64 {
s, _ := reader.ReadBytes('\n')
res := make([]float64, n)
var pos int
for i := 0; i < n; i++ {
pos = readFloat64(s, pos, &res[i]) + 1
}
return res
}
const H = 20
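// H is the number of binary-lifting levels precomputed for LCA queries:
// ancestors at distances 2^0 .. 2^(H-1) are stored, so H = 20 is enough for
// trees with up to about 2^20 (~10^6) nodes.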
func solve(n int, S string, E [][]int, Q [][]int) []bool {
g := NewGraph(n, len(E)*2)
for _, e := range E {
u, v := e[0], e[1]
u--
v--
g.AddEdge(u, v)
g.AddEdge(v, u)
}
D := make([]int, n)
P := make([][]int, n)
cnt := make([][]int, n)
for i := 0; i < n; i++ {
cnt[i] = make([]int, 26)
}
var dfs func(p, u int)
dfs = func(p, u int) {
P[u] = make([]int, H)
P[u][0] = p
for i := 1; i < H; i++ {
P[u][i] = P[P[u][i-1]][i-1]
}
cnt[u][int(S[u]-'a')]++
for i := g.nodes[u]; i > 0; i = g.next[i] {
v := g.to[i]
if p != v {
D[v] = D[u] + 1
copy(cnt[v], cnt[u])
dfs(u, v)
}
}
}
dfs(0, 0)
lca := func(u, v int) int {
if D[u] < D[v] {
u, v = v, u
}
for i := H - 1; i >= 0; i-- {
if D[u]-(1<<uint(i)) >= D[v] {
u = P[u][i]
}
}
if u == v {
return u
}
for i := H - 1; i >= 0; i-- {
if P[u][i] != P[v][i] {
u = P[u][i]
v = P[v][i]
}
}
return P[u][0]
}
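// Query logic: a query (u, v) is answered "YES" iff some letter occurs on both
// the u-to-LCA and v-to-LCA path segments. cnt[x][c] counts occurrences of
// letter c on the root-to-x path, so cnt[u][c]-cnt[p][c] is the count on the
// segment from u up to (and excluding) the LCA p. Queries where one endpoint
// is an ancestor of the other are left as "NO".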
ans := make([]bool, len(Q))
for i, cur := range Q {
u, v := cur[0], cur[1]
u--
v--
p := lca(u, v)
if p != u && p != v {
for j := 0; j < 26; j++ {
if cnt[u][j]-cnt[p][j] > 0 && cnt[v][j]-cnt[p][j] > 0 {
ans[i] = true
break
}
}
}
}
return ans
}
type Graph struct {
nodes []int
next []int
to []int
cur int
}
func NewGraph(n int, e int) *Graph {
nodes := make([]int, n)
next := make([]int, e+10)
to := make([]int, e+10)
return &Graph{nodes, next, to, 0}
}
func (g *Graph) AddEdge(u, v int) {
g.cur++
g.next[g.cur] = g.nodes[u]
g.nodes[u] = g.cur
g.to[g.cur] = v
}
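// Graph stores adjacency lists as linked lists packed into flat arrays:
// nodes[u] holds the index of u's most recently added edge, next[i] chains to
// the previously added one, and to[i] is the edge target; index 0 terminates a list.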
| main |
TreeExample.shorthand.tsx | import * as React from 'react';
import { Tree } from '@fluentui/react-northstar';
const items = [
{
id: 'tree-item-1',
title: 'House Lannister',
items: [
{
id: 'tree-item-11',
title: 'Tywin',
items: [
{
id: 'tree-item-111',
title: 'Jaime',
},
{
id: 'tree-item-112',
title: 'Cersei',
},
{
id: 'tree-item-113',
title: 'Tyrion',
},
],
},
{
id: 'tree-item-12',
title: 'Kevan',
items: [
{
id: 'tree-item-121',
title: 'Lancel',
},
{
id: 'tree-item-122',
title: 'Willem',
},
{
id: 'tree-item-123',
title: 'Martyn',
},
],
},
],
},
{
id: 'tree-item-2',
title: 'House Targaryen',
items: [
{
id: 'tree-item-21',
title: 'Aerys',
items: [
{
id: 'tree-item-211',
title: 'Rhaegar',
},
{
id: 'tree-item-212', | title: 'Viserys',
},
{
id: 'tree-item-213',
title: 'Daenerys',
},
],
},
],
},
];
const TreeExampleShorthand = () => <Tree aria-label="default" items={items} />;
export default TreeExampleShorthand; | |
test_polynomial.py | import numpy as np
import pytest
from scipy import sparse
from scipy.sparse import random as sparse_random
from sklearn.utils._testing import assert_array_almost_equal
from numpy.testing import assert_allclose, assert_array_equal
from scipy.interpolate import BSpline
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import (
KBinsDiscretizer,
PolynomialFeatures,
SplineTransformer,
)
from sklearn.utils.fixes import linspace, sp_version, parse_version
@pytest.mark.parametrize("est", (PolynomialFeatures, SplineTransformer))
def test_polynomial_and_spline_array_order(est):
"""Test that output array has the given order."""
X = np.arange(10).reshape(5, 2)
def is_c_contiguous(a):
return np.isfortran(a.T)
assert is_c_contiguous(est().fit_transform(X))
assert is_c_contiguous(est(order="C").fit_transform(X))
assert np.isfortran(est(order="F").fit_transform(X))
@pytest.mark.parametrize(
"params, err_msg",
[
({"degree": -1}, "degree must be a non-negative integer"),
({"degree": 2.5}, "degree must be a non-negative integer"),
({"degree": "string"}, "degree must be a non-negative integer"),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 2.5}, "n_knots must be a positive integer >= 2."),
({"n_knots": "string"}, "n_knots must be a positive integer >= 2."),
({"knots": 1}, "Expected 2D array, got scalar array instead:"),
({"knots": [1, 2]}, "Expected 2D array, got 1D array instead:"),
(
{"knots": [[1]]},
r"Number of knots, knots.shape\[0\], must be >= 2.",
),
(
{"knots": [[1, 5], [2, 6]]},
r"knots.shape\[1\] == n_features is violated.",
),
(
{"knots": [[1], [1], [2]]},
"knots must be sorted without duplicates.",
),
({"knots": [[2], [1]]}, "knots must be sorted without duplicates."),
(
{"extrapolation": None},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": 1},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": "string"},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
({"include_bias": None}, "include_bias must be bool."),
({"include_bias": 1}, "include_bias must be bool."),
({"include_bias": "string"}, "include_bias must be bool."),
(
{"extrapolation": "periodic", "n_knots": 3, "degree": 3},
"Periodic splines require degree < n_knots. Got n_knots=3 and degree=3.",
),
(
{"extrapolation": "periodic", "knots": [[0], [1]], "degree": 2},
"Periodic splines require degree < n_knots. Got n_knots=2 and degree=2.",
),
],
)
def test_spline_transformer_input_validation(params, err_msg):
"""Test that we raise errors for invalid input in SplineTransformer."""
X = [[1], [2]]
with pytest.raises(ValueError, match=err_msg):
SplineTransformer(**params).fit(X)
def test_spline_transformer_manual_knot_input():
"""
Test that array-like knot positions in SplineTransformer are accepted.
"""
X = np.arange(20).reshape(10, 2)
knots = [[0.5, 1], [1.5, 2], [5, 10]]
st1 = SplineTransformer(degree=3, knots=knots, n_knots=None).fit(X)
knots = np.asarray(knots)
st2 = SplineTransformer(degree=3, knots=knots, n_knots=None).fit(X)
for i in range(X.shape[1]):
assert_allclose(st1.bsplines_[i].t, st2.bsplines_[i].t)
@pytest.mark.parametrize("extrapolation", ["continue", "periodic"])
def test_spline_transformer_integer_knots(extrapolation):
"""Test that SplineTransformer accepts integer value knot positions."""
X = np.arange(20).reshape(10, 2)
knots = [[0, 1], [1, 2], [5, 5], [11, 10], [12, 11]]
_ = SplineTransformer(
degree=3, knots=knots, extrapolation=extrapolation
).fit_transform(X)
def test_spline_transformer_feature_names():
"""Test that SplineTransformer generates correct features name."""
X = np.arange(20).reshape(10, 2)
splt = SplineTransformer(n_knots=3, degree=3, include_bias=True).fit(X)
feature_names = splt.get_feature_names()
assert_array_equal(
feature_names,
[
"x0_sp_0",
"x0_sp_1",
"x0_sp_2",
"x0_sp_3",
"x0_sp_4",
"x1_sp_0",
"x1_sp_1",
"x1_sp_2",
"x1_sp_3",
"x1_sp_4",
],
)
splt = SplineTransformer(n_knots=3, degree=3, include_bias=False).fit(X)
feature_names = splt.get_feature_names(["a", "b"])
assert_array_equal(
feature_names,
[
"a_sp_0",
"a_sp_1",
"a_sp_2",
"a_sp_3",
"b_sp_0",
"b_sp_1",
"b_sp_2",
"b_sp_3",
],
)
@pytest.mark.parametrize("degree", range(1, 5))
@pytest.mark.parametrize("n_knots", range(3, 5))
@pytest.mark.parametrize("knots", ["uniform", "quantile"])
@pytest.mark.parametrize("extrapolation", ["constant", "periodic"])
def test_spline_transformer_unity_decomposition(degree, n_knots, knots, extrapolation):
"""Test that B-splines are indeed a decomposition of unity.
Spline basis functions must sum up to 1 per row if we stay between the
boundaries.
"""
X = np.linspace(0, 1, 100)[:, None]
# make the boundaries 0 and 1 part of X_train, for sure.
X_train = np.r_[[[0]], X[::2, :], [[1]]]
X_test = X[1::2, :]
if extrapolation == "periodic":
n_knots = n_knots + degree # periodic splines require degree < n_knots
splt = SplineTransformer(
n_knots=n_knots,
degree=degree,
knots=knots,
include_bias=True,
extrapolation=extrapolation,
)
splt.fit(X_train)
for X in [X_train, X_test]:
assert_allclose(np.sum(splt.transform(X), axis=1), 1)
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_linear_regression(bias, intercept):
"""Test that B-splines fit a sinusodial curve pretty well."""
X = np.linspace(0, 10, 100)[:, None]
y = np.sin(X[:, 0]) + 2 # +2 to avoid the value 0 in assert_allclose
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=15,
degree=3,
include_bias=bias,
extrapolation="constant",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict(X), y, rtol=1e-3)
@pytest.mark.parametrize(
"knots, n_knots, degree",
[
("uniform", 5, 3),
("uniform", 12, 8),
(
[[-1.0, 0.0], [0, 1.0], [0.1, 2.0], [0.2, 3.0], [0.3, 4.0], [1, 5.0]],
None,
3,
),
],
)
def test_spline_transformer_periodicity_of_extrapolation(knots, n_knots, degree):
"""Test that the SplineTransformer is periodic for multiple features."""
X_1 = linspace((-1, 0), (1, 5), 10)
X_2 = linspace((1, 5), (3, 10), 10)
splt = SplineTransformer(
knots=knots, n_knots=n_knots, degree=degree, extrapolation="periodic"
)
splt.fit(X_1)
assert_allclose(splt.transform(X_1), splt.transform(X_2))
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_periodic_linear_regression(bias, intercept):
"""Test that B-splines fit a periodic curve pretty well."""
# "+ 3" to avoid the value 0 in assert_allclose
def f(x):
return np.sin(2 * np.pi * x) - np.sin(8 * np.pi * x) + 3
X = np.linspace(0, 1, 101)[:, None]
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=20,
degree=3,
include_bias=bias,
extrapolation="periodic",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, f(X[:, 0]))
# Generate larger array to check periodic extrapolation
X_ = np.linspace(-1, 2, 301)[:, None]
predictions = pipe.predict(X_)
assert_allclose(predictions, f(X_[:, 0]), atol=0.01, rtol=0.01)
assert_allclose(predictions[0:100], predictions[100:200], rtol=1e-3)
@pytest.mark.skipif(
sp_version < parse_version("1.0.0"),
reason="Periodic extrapolation not yet implemented for BSpline.",
)
def test_spline_transformer_periodic_spline_backport():
"""Test that the backport of extrapolate="periodic" works correctly"""
X = np.linspace(-2, 3.5, 10)[:, None]
degree = 2
# Use periodic extrapolation backport in SplineTransformer
transformer = SplineTransformer(
degree=degree, extrapolation="periodic", knots=[[-1.0], [0.0], [1.0]]
)
Xt = transformer.fit_transform(X)
# Use periodic extrapolation in BSpline
coef = np.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0], [0.0, 1.0]])
spl = BSpline(np.arange(-3, 4), coef, degree, "periodic")
Xspl = spl(X[:, 0])
assert_allclose(Xt, Xspl)
def test_spline_transformer_periodic_splines_periodicity():
"""
Test if shifted knots result in the same transformation up to permutation.
"""
X = np.linspace(0, 10, 101)[:, None]
transformer_1 = SplineTransformer(
degree=3,
extrapolation="periodic",
knots=[[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]],
)
transformer_2 = SplineTransformer(
degree=3,
extrapolation="periodic",
knots=[[1.0], [3.0], [4.0], [5.0], [8.0], [9.0]],
)
Xt_1 = transformer_1.fit_transform(X)
Xt_2 = transformer_2.fit_transform(X)
assert_allclose(Xt_1, Xt_2[:, [4, 0, 1, 2, 3]])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_periodic_splines_smoothness(degree):
"""Test that spline transformation is smooth at first / last knot."""
X = np.linspace(-2, 10, 10_000)[:, None]
transformer = SplineTransformer(
degree=degree,
extrapolation="periodic",
knots=[[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]],
)
Xt = transformer.fit_transform(X)
delta = (X.max() - X.min()) / len(X)
tol = 10 * delta
dXt = Xt
# We expect splines of degree `degree` to be (`degree`-1) times
# continuously differentiable, i.e. for d = 0, ..., `degree` - 1 the d-th
# derivative should be continuous. This is the case if the (d+1)-th
# numerical derivative is reasonably small (smaller than `tol` in absolute
# value). We thus compute the d-th numeric derivatives for d = 1, ..., `degree`
# and compare them to `tol`.
#
# Note that the 0-th derivative is the function itself, such that we are
# also checking its continuity.
for d in range(1, degree + 1):
# Check continuity of the (d-1)-th derivative
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() < tol
# Compute d-th numeric derivative
dXt = diff / delta
# As degree `degree` splines are not `degree` times continuously
# differentiable at the knots, the `degree + 1`-th numeric derivative
# should have spikes at the knots.
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() > 1
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
@pytest.mark.parametrize("degree", [1, 2, 3, 4, 5])
def test_spline_transformer_extrapolation(bias, intercept, degree):
"""Test that B-spline extrapolation works correctly."""
# we use a straight line for that
X = np.linspace(-1, 1, 100)[:, None]
y = X.squeeze()
# 'constant'
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="constant",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-1, 1])
# 'linear'
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="linear",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-10, 5])
# 'error'
splt = SplineTransformer(
n_knots=4, degree=degree, include_bias=bias, extrapolation="error"
)
splt.fit(X)
with pytest.raises(ValueError):
splt.transform([[-10]])
with pytest.raises(ValueError):
splt.transform([[5]])
def test_spline_transformer_kbindiscretizer():
"""Test that a B-spline of degree=0 is equivalent to KBinsDiscretizer."""
rng = np.random.RandomState(97531)
X = rng.randn(200).reshape(200, 1)
n_bins = 5
n_knots = n_bins + 1
splt = SplineTransformer(
n_knots=n_knots, degree=0, knots="quantile", include_bias=True
)
splines = splt.fit_transform(X)
kbd = KBinsDiscretizer(n_bins=n_bins, encode="onehot-dense", strategy="quantile")
kbins = kbd.fit_transform(X)
# Though they should be exactly equal, we test approximately with high
# accuracy.
assert_allclose(splines, kbins, rtol=1e-13)
@pytest.mark.parametrize("n_knots", [5, 10])
@pytest.mark.parametrize("include_bias", [True, False])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_n_features_out(n_knots, include_bias, degree):
"""Test that transform results in n_features_out_ features."""
splt = SplineTransformer(n_knots=n_knots, degree=degree, include_bias=include_bias)
X = np.linspace(0, 1, 10)[:, None]
splt.fit(X)
assert splt.transform(X).shape[1] == splt.n_features_out_
@pytest.mark.parametrize(
"params, err_msg",
[
({"degree": -1}, "degree must be a non-negative integer"),
({"degree": 2.5}, "degree must be a non-negative int or tuple"),
({"degree": "12"}, r"degree=\(min_degree, max_degree\) must"),
({"degree": "string"}, "degree must be a non-negative int or tuple"),
({"degree": (-1, 2)}, r"degree=\(min_degree, max_degree\) must"),
({"degree": (0, 1.5)}, r"degree=\(min_degree, max_degree\) must"),
({"degree": (3, 2)}, r"degree=\(min_degree, max_degree\) must"),
],
)
def test_polynomial_features_input_validation(params, err_msg):
"""Test that we raise errors for invalid input in PolynomialFeatures."""
X = [[1], [2]]
with pytest.raises(ValueError, match=err_msg):
PolynomialFeatures(**params).fit(X)
@pytest.fixture()
def single_feature_degree3():
X = np.arange(6)[:, np.newaxis]
P = np.hstack([np.ones_like(X), X, X ** 2, X ** 3])
return X, P
@pytest.mark.parametrize(
"degree, include_bias, interaction_only, indices",
[
(3, True, False, slice(None, None)),
(3, False, False, slice(1, None)),
(3, True, True, [0, 1]),
(3, False, True, [1]),
((2, 3), True, False, [0, 2, 3]),
((2, 3), False, False, [2, 3]),
((2, 3), True, True, [0]),
((2, 3), False, True, []),
],
)
@pytest.mark.parametrize(
"sparse_X",
[False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_one_feature(
single_feature_degree3,
degree,
include_bias,
interaction_only,
indices,
sparse_X,
):
"""Test PolynomialFeatures on single feature up to degree 3."""
X, P = single_feature_degree3
if sparse_X:
X = sparse_X(X)
tf = PolynomialFeatures(
degree=degree, include_bias=include_bias, interaction_only=interaction_only
).fit(X)
out = tf.transform(X)
if sparse_X:
out = out.toarray()
assert_allclose(out, P[:, indices])
if tf.n_output_features_ > 0:
assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
@pytest.fixture()
def two_features_degree3():
X = np.arange(6).reshape((3, 2))
x1 = X[:, :1]
x2 = X[:, 1:]
P = np.hstack(
[
x1 ** 0 * x2 ** 0, # 0
x1 ** 1 * x2 ** 0, # 1
x1 ** 0 * x2 ** 1, # 2
x1 ** 2 * x2 ** 0, # 3
x1 ** 1 * x2 ** 1, # 4
x1 ** 0 * x2 ** 2, # 5
x1 ** 3 * x2 ** 0, # 6
x1 ** 2 * x2 ** 1, # 7
x1 ** 1 * x2 ** 2, # 8
x1 ** 0 * x2 ** 3, # 9
]
)
return X, P
@pytest.mark.parametrize(
"degree, include_bias, interaction_only, indices",
[
(2, True, False, slice(0, 6)),
(2, False, False, slice(1, 6)),
(2, True, True, [0, 1, 2, 4]),
(2, False, True, [1, 2, 4]),
((2, 2), True, False, [0, 3, 4, 5]),
((2, 2), False, False, [3, 4, 5]),
((2, 2), True, True, [0, 4]),
((2, 2), False, True, [4]),
(3, True, False, slice(None, None)),
(3, False, False, slice(1, None)),
(3, True, True, [0, 1, 2, 4]),
(3, False, True, [1, 2, 4]),
((2, 3), True, False, [0, 3, 4, 5, 6, 7, 8, 9]),
((2, 3), False, False, slice(3, None)),
((2, 3), True, True, [0, 4]),
((2, 3), False, True, [4]),
((3, 3), True, False, [0, 6, 7, 8, 9]),
((3, 3), False, False, [6, 7, 8, 9]),
((3, 3), True, True, [0]),
((3, 3), False, True, []), # would need 3 input features
],
)
@pytest.mark.parametrize(
"sparse_X",
[False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_two_features(
two_features_degree3,
degree,
include_bias,
interaction_only,
indices,
sparse_X,
):
"""Test PolynomialFeatures on 2 features up to degree 3."""
X, P = two_features_degree3
if sparse_X:
X = sparse_X(X)
tf = PolynomialFeatures(
degree=degree, include_bias=include_bias, interaction_only=interaction_only
).fit(X)
out = tf.transform(X)
if sparse_X:
out = out.toarray()
assert_allclose(out, P[:, indices])
if tf.n_output_features_ > 0:
assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
def test_polynomial_feature_names():
X = np.arange(30).reshape(10, 3)
poly = PolynomialFeatures(degree=2, include_bias=True).fit(X)
feature_names = poly.get_feature_names()
assert_array_equal(
["1", "x0", "x1", "x2", "x0^2", "x0 x1", "x0 x2", "x1^2", "x1 x2", "x2^2"],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(degree=3, include_bias=False).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(
[
"a",
"b",
"c",
"a^2",
"a b",
"a c",
"b^2",
"b c",
"c^2",
"a^3",
"a^2 b",
"a^2 c",
"a b^2",
"a b c",
"a c^2",
"b^3",
"b^2 c",
"b c^2",
"c^3",
],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(degree=(2, 3), include_bias=False).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(
[
"a^2",
"a b",
"a c",
"b^2",
"b c",
"c^2",
"a^3",
"a^2 b",
"a^2 c",
"a b^2",
"a b c",
"a c^2",
"b^3",
"b^2 c",
"b c^2",
"c^3",
],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(
degree=(3, 3), include_bias=True, interaction_only=True
).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(["1", "a b c"], feature_names)
assert len(feature_names) == poly.transform(X).shape[1]
# test some unicode
poly = PolynomialFeatures(degree=1, include_bias=True).fit(X)
feature_names = poly.get_feature_names(["\u0001F40D", "\u262E", "\u05D0"])
assert_array_equal(["1", "\u0001F40D", "\u262E", "\u05D0"], feature_names)
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(1, True, False, int),
(2, True, False, int),
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
(4, False, False, np.float64),
(4, False, True, np.float64),
],
)
def test_polynomial_features_csc_X(deg, include_bias, interaction_only, dtype):
|
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(1, True, False, int),
(2, True, False, int),
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
],
)
def test_polynomial_features_csr_X(deg, include_bias, interaction_only, dtype):
rng = np.random.RandomState(0)
X = rng.randint(0, 2, (100, 2))
X_csr = sparse.csr_matrix(X)
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype, copy=False))
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize("n_features", [1, 4, 5])
@pytest.mark.parametrize(
"min_degree, max_degree", [(0, 1), (0, 2), (1, 3), (0, 4), (3, 4)]
)
@pytest.mark.parametrize("interaction_only", [True, False])
@pytest.mark.parametrize("include_bias", [True, False])
def test_num_combinations(
n_features,
min_degree,
max_degree,
interaction_only,
include_bias,
):
"""
Test that n_output_features_ is calculated correctly.
"""
x = sparse.csr_matrix(([1], ([0], [n_features - 1])))
est = PolynomialFeatures(
degree=max_degree,
interaction_only=interaction_only,
include_bias=include_bias,
)
est.fit(x)
num_combos = est.n_output_features_
combos = PolynomialFeatures._combinations(
n_features=n_features,
min_degree=0,
max_degree=max_degree,
interaction_only=interaction_only,
include_bias=include_bias,
)
assert num_combos == sum([1 for _ in combos])
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
],
)
def test_polynomial_features_csr_X_floats(deg, include_bias, interaction_only, dtype):
X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype))
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize(
["zero_row_index", "deg", "interaction_only"],
[
(0, 2, True),
(1, 2, True),
(2, 2, True),
(0, 3, True),
(1, 3, True),
(2, 3, True),
(0, 2, False),
(1, 2, False),
(2, 2, False),
(0, 3, False),
(1, 3, False),
(2, 3, False),
],
)
def test_polynomial_features_csr_X_zero_row(zero_row_index, deg, interaction_only):
X_csr = sparse_random(3, 10, 1.0, random_state=0).tocsr()
X_csr[zero_row_index, :] = 0.0
X = X_csr.toarray()
est = PolynomialFeatures(deg, include_bias=False, interaction_only=interaction_only)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
# This degree should always be one more than the highest degree supported by
# _csr_expansion.
@pytest.mark.parametrize(
["include_bias", "interaction_only"],
[(True, True), (True, False), (False, True), (False, False)],
)
def test_polynomial_features_csr_X_degree_4(include_bias, interaction_only):
X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(
4, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize(
["deg", "dim", "interaction_only"],
[
(2, 1, True),
(2, 2, True),
(3, 1, True),
(3, 2, True),
(3, 3, True),
(2, 1, False),
(2, 2, False),
(3, 1, False),
(3, 2, False),
(3, 3, False),
],
)
def test_polynomial_features_csr_X_dim_edges(deg, dim, interaction_only):
X_csr = sparse_random(1000, dim, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(deg, interaction_only=interaction_only)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
def test_polynomial_features_deprecated_n_input_features():
# check that we raise a deprecation warning when accessing
# `n_input_features_`. FIXME: remove in 1.2
depr_msg = (
"The attribute `n_input_features_` was deprecated in version "
"1.0 and will be removed in 1.2."
)
X = np.arange(10).reshape(5, 2)
with pytest.warns(FutureWarning, match=depr_msg):
PolynomialFeatures().fit(X).n_input_features_
| rng = np.random.RandomState(0)
X = rng.randint(0, 2, (100, 2))
X_csc = sparse.csc_matrix(X)
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
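    # The CSC path must match the dense path in both values and dtype.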
Xt_csc = est.fit_transform(X_csc.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype))
assert isinstance(Xt_csc, sparse.csc_matrix)
assert Xt_csc.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csc.A, Xt_dense) |
setup.py | # -*- coding: utf-8 -*-
############################################################################
#
# Copyright © 2011, 2012, 2013, 2014, 2015 OnlineGroups.net and
# Contributors.
#
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
############################################################################
import codecs
import os
from setuptools import setup, find_packages
from version import get_version
version = get_version()
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
with codecs.open(os.path.join("docs", "HISTORY.rst"),
encoding='utf-8') as f:
long_description += '\n' + f.read()
setup(
name='gs.site.change.name',
version=version,
description="Change the name of a GroupServer site",
long_description=long_description,
classifiers=[
'Development Status :: 5 - Production/Stable',
"Environment :: Web Environment",
"Framework :: Zope2",
"Intended Audience :: Developers",
'License :: OSI Approved :: Zope Public License',
"Natural Language :: English",
"Natural Language :: French",
"Natural Language :: German",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules", | url='https://source.iopen.net/groupserver/gs.site.change.name/',
license='ZPL 2.1',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['gs', 'gs.site', 'gs.site.change', ],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'zope.formlib',
'zope.browserpage',
'zope.i18n[compile]',
'zope.i18nmessageid',
'zope.interface',
'zope.schema',
'zope.tal',
'zope.tales',
'zope.viewlet',
'Zope2',
'gs.content.form.base',
'gs.content.layout',
'gs.help',
'gs.site.change.base',
'Products.GSContent',
],
entry_points="""
# -*- Entry points: -*-
""",) | ],
    keywords='site, groupserver, name, configure, admin',
author='Michael JasonSmith',
author_email='[email protected]', |
quick_sort.rs | fn quick_sort(list: &mut Vec<i32>, left: usize, right: usize) |
fn main() {
let my_vec = &mut vec![5,3,4,1,2];
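    // quick_sort takes inclusive bounds, so the right index is len() - 1.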
let size = my_vec.len() - 1;
quick_sort(my_vec, 0, size);
println!("Hello world!! {:?}", my_vec);
}
| {
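    // Hoare-style partition around the middle element: `i` scans right past
    // values below the pivot, `j` scans left past values above it, and
    // out-of-place pairs are swapped.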
let (mut i, mut j) = (left, right);
let pivot = list[(left+right)/2];
    while i <= j {
        while list[i] < pivot {
            i += 1;
        }
        while list[j] > pivot {
            j -= 1;
        }
        if i <= j {
            list.swap(i, j);
            i += 1;
            if j == 0 {
                // `j` is a usize, so decrementing past 0 would underflow and
                // panic; the outer loop would exit on the next check anyway.
                break;
            }
            j -= 1;
        }
    }
if left < j {
quick_sort(list, left, j);
}
if i < right {
quick_sort(list, i, right);
}
} |
default_service_account.rs | use crate::authentication_manager::ServiceAccount;
use crate::prelude::*;
use hyper::body::Body;
use hyper::Method;
use std::str;
use std::sync::RwLock;
#[derive(Debug)]
pub struct DefaultServiceAccount {
token: RwLock<Token>,
}
impl DefaultServiceAccount {
const DEFAULT_PROJECT_ID_GCP_URI: &'static str =
"http://metadata.google.internal/computeMetadata/v1/project/project-id";
const DEFAULT_TOKEN_GCP_URI: &'static str = "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token";
pub async fn new(client: &HyperClient) -> Result<Self, Error> {
let token = RwLock::new(Self::get_token(client).await?);
Ok(Self { token })
}
fn build_token_request(uri: &str) -> Request<Body> {
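        // The GCP metadata server rejects any request that lacks the
        // "Metadata-Flavor: Google" header.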
Request::builder()
.method(Method::GET)
.uri(uri)
.header("Metadata-Flavor", "Google")
.body(Body::empty())
.unwrap()
}
async fn get_token(client: &HyperClient) -> Result<Token, Error> {
log::debug!("Getting token from GCP instance metadata server");
let req = Self::build_token_request(Self::DEFAULT_TOKEN_GCP_URI);
let token = client
.request(req)
.await
.map_err(Error::ConnectionError)?
.deserialize()
.await?;
Ok(token)
}
}
#[async_trait]
impl ServiceAccount for DefaultServiceAccount {
async fn project_id(&self, client: &HyperClient) -> Result<String, Error> {
log::debug!("Getting project ID from GCP instance metadata server");
let req = Self::build_token_request(Self::DEFAULT_PROJECT_ID_GCP_URI);
let rsp = client.request(req).await.map_err(Error::ConnectionError)?;
let (_, body) = rsp.into_parts();
let body = hyper::body::to_bytes(body) | .map_err(Error::ConnectionError)?;
match str::from_utf8(&body) {
Ok(s) => Ok(s.to_owned()),
Err(_) => Err(Error::ProjectIdNonUtf8),
}
}
fn get_token(&self, _scopes: &[&str]) -> Option<Token> {
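        // Scopes are ignored: the metadata server hands out a single token for
        // the default service account, so the cached copy is returned as-is.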
Some(self.token.read().unwrap().clone())
}
async fn refresh_token(&self, client: &HyperClient, _scopes: &[&str]) -> Result<Token, Error> {
let token = Self::get_token(client).await?;
*self.token.write().unwrap() = token.clone();
Ok(token)
}
} | .await |
mod.rs | /*
Copyright (C) 2018-2019 [email protected]
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
mod code_table;
#[cfg(feature = "instr_info")]
mod condition_code_table;
#[cfg(feature = "instr_info")]
mod cpuid_feature_table;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod decoder_options_table;
#[cfg(feature = "instr_info")]
mod encoding_kind_table;
#[cfg(feature = "instr_info")]
mod flow_control_table;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod memory_size_options_table;
mod memory_size_table;
mod mnemonic_table;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod number_base_table;
#[cfg(feature = "encoder")]
mod op_code_operand_kind_table;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod options_props_table;
mod register_table;
#[cfg(feature = "encoder")]
mod tuple_type_table;
use self::code_table::*;
#[cfg(feature = "instr_info")]
use self::condition_code_table::*;
#[cfg(feature = "instr_info")]
use self::cpuid_feature_table::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use self::decoder_options_table::*;
#[cfg(feature = "instr_info")]
use self::encoding_kind_table::*;
#[cfg(feature = "instr_info")]
use self::flow_control_table::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use self::memory_size_options_table::*;
use self::memory_size_table::*;
use self::mnemonic_table::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use self::number_base_table::*;
#[cfg(feature = "encoder")]
use self::op_code_operand_kind_table::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use self::options_props_table::*;
use self::register_table::*;
#[cfg(feature = "encoder")]
use self::tuple_type_table::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use super::super::formatter::tests::enums::OptionsProps;
use super::super::*;
#[cfg(not(feature = "std"))]
use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
use core::{i16, i8};
use core::{i32, u16, u32, u8};
#[cfg(feature = "instr_info")]
#[cfg(not(feature = "std"))]
use hashbrown::HashMap;
#[cfg(feature = "instr_info")]
#[cfg(feature = "std")]
use std::collections::HashMap;
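/// Parse a hex string into raw bytes; whitespace between digits is ignored
/// (e.g. "90 CC" and "90CC" both decode to [0x90, 0xCC]).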
pub(crate) fn to_vec_u8(hex_data: &str) -> Result<Vec<u8>, String> {
let mut bytes = Vec::with_capacity(hex_data.len() / 2);
let mut iter = hex_data.chars().filter(|c| !c.is_whitespace());
loop {
let hi = try_parse_hex_char(match iter.next() {
Some(c) => c,
None => break,
});
let lo = try_parse_hex_char(match iter.next() {
Some(c) => c,
None => return Err(format!("Missing hex digit in string: '{}'", hex_data)),
});
if hi < 0 || lo < 0 {
return Err(format!("Invalid hex string: '{}'", hex_data));
}
bytes.push(((hi << 4) | lo) as u8);
}
fn try_parse_hex_char(c: char) -> i32 {
if '0' <= c && c <= '9' {
return c as i32 - '0' as i32;
}
if 'A' <= c && c <= 'F' {
return c as i32 - 'A' as i32 + 10;
}
if 'a' <= c && c <= 'f' {
return c as i32 - 'a' as i32 + 10;
}
-1
}
Ok(bytes)
} | let result = if value.starts_with("0x") { u64::from_str_radix(&value[2..], 16) } else { value.trim().parse() };
match result {
Ok(value) => Ok(value),
Err(_) => Err(format!("Invalid number: {}", value)),
}
}
#[cfg(any(feature = "encoder", feature = "instr_info", feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_i64(value: &str) -> Result<i64, String> {
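    // Parse the magnitude as u64 so full 64-bit hex patterns are accepted,
    // then reapply the sign with wrapping arithmetic.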
let mut unsigned_value = value.trim();
let mult = if unsigned_value.starts_with('-') {
unsigned_value = &unsigned_value[1..];
-1
} else {
1
};
let result = if unsigned_value.starts_with("0x") { u64::from_str_radix(&unsigned_value[2..], 16) } else { unsigned_value.trim().parse() };
match result {
Ok(value) => Ok((value as i64).wrapping_mul(mult)),
Err(_) => Err(format!("Invalid number: {}", value)),
}
}
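// The narrower integer parsers below funnel through to_u64/to_i64 and
// range-check the result before casting down.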
pub(crate) fn to_u32(value: &str) -> Result<u32, String> {
let value = value.trim();
if let Ok(v64) = to_u64(value) {
if v64 <= u32::MAX as u64 {
return Ok(v64 as u32);
}
}
Err(format!("Invalid number: {}", value))
}
#[cfg(any(feature = "encoder", feature = "instr_info", feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_i32(value: &str) -> Result<i32, String> {
let value = value.trim();
if let Ok(v64) = to_i64(value) {
if i32::MIN as i64 <= v64 && v64 <= i32::MAX as i64 {
return Ok(v64 as i32);
}
}
Err(format!("Invalid number: {}", value))
}
pub(crate) fn to_u16(value: &str) -> Result<u16, String> {
let value = value.trim();
if let Ok(v64) = to_u64(value) {
if v64 <= u16::MAX as u64 {
return Ok(v64 as u16);
}
}
Err(format!("Invalid number: {}", value))
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_i16(value: &str) -> Result<i16, String> {
let value = value.trim();
if let Ok(v64) = to_i64(value) {
if i16::MIN as i64 <= v64 && v64 <= i16::MAX as i64 {
return Ok(v64 as i16);
}
}
Err(format!("Invalid number: {}", value))
}
pub(crate) fn to_u8(value: &str) -> Result<u8, String> {
let value = value.trim();
if let Ok(v64) = to_u64(value) {
if v64 <= u8::MAX as u64 {
return Ok(v64 as u8);
}
}
Err(format!("Invalid number: {}", value))
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_i8(value: &str) -> Result<i8, String> {
let value = value.trim();
if let Ok(v64) = to_i64(value) {
if i8::MIN as i64 <= v64 && v64 <= i8::MAX as i64 {
return Ok(v64 as i8);
}
}
Err(format!("Invalid number: {}", value))
}
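// The converters below are thin lookups into generated string-to-enum hash tables.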
pub(crate) fn to_code(value: &str) -> Result<Code, String> {
let value = value.trim();
match TO_CODE_HASH.get(value) {
Some(code) => Ok(*code),
None => Err(format!("Invalid Code value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn code_names() -> Vec<&'static str> {
(&*TO_CODE_HASH).iter().map(|kv| *kv.0).collect()
}
pub(crate) fn to_mnemonic(value: &str) -> Result<Mnemonic, String> {
let value = value.trim();
match TO_MNEMONIC_HASH.get(value) {
Some(mnemonic) => Ok(*mnemonic),
None => Err(format!("Invalid Mnemonic value: {}", value)),
}
}
pub(crate) fn to_register(value: &str) -> Result<Register, String> {
let value = value.trim();
if value.is_empty() {
return Ok(Register::None);
}
match TO_REGISTER_HASH.get(value) {
Some(register) => Ok(*register),
None => Err(format!("Invalid Register value: {}", value)),
}
}
#[cfg(feature = "instr_info")]
pub(crate) fn clone_register_hashmap() -> HashMap<String, Register> {
TO_REGISTER_HASH.iter().map(|kv| ((*kv.0).to_string(), *kv.1)).collect()
}
pub(crate) fn to_memory_size(value: &str) -> Result<MemorySize, String> {
let value = value.trim();
match TO_MEMORY_SIZE_HASH.get(value) {
Some(memory_size) => Ok(*memory_size),
None => Err(format!("Invalid MemorySize value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_decoder_options(value: &str) -> Result<u32, String> {
let value = value.trim();
match TO_DECODER_OPTIONS_HASH.get(value) {
Some(decoder_options) => Ok(*decoder_options),
None => Err(format!("Invalid DecoderOptions value: {}", value)),
}
}
#[cfg(feature = "instr_info")]
pub(crate) fn to_encoding_kind(value: &str) -> Result<EncodingKind, String> {
let value = value.trim();
match TO_ENCODING_KIND_HASH.get(value) {
Some(encoding_kind) => Ok(*encoding_kind),
None => Err(format!("Invalid EncodingKind value: {}", value)),
}
}
#[cfg(feature = "encoder")]
pub(crate) fn to_tuple_type(value: &str) -> Result<TupleType, String> {
let value = value.trim();
match TO_TUPLE_TYPE_HASH.get(value) {
Some(tuple_type) => Ok(*tuple_type),
None => Err(format!("Invalid TupleType value: {}", value)),
}
}
#[cfg(feature = "instr_info")]
pub(crate) fn to_cpuid_features(value: &str) -> Result<CpuidFeature, String> {
let value = value.trim();
match TO_CPUID_FEATURE_HASH.get(value) {
Some(cpuid_features) => Ok(*cpuid_features),
None => Err(format!("Invalid CpuidFeature value: {}", value)),
}
}
#[cfg(feature = "instr_info")]
pub(crate) fn to_flow_control(value: &str) -> Result<FlowControl, String> {
let value = value.trim();
match TO_FLOW_CONTROL_HASH.get(value) {
Some(flow_control) => Ok(*flow_control),
None => Err(format!("Invalid FlowControl value: {}", value)),
}
}
#[cfg(feature = "encoder")]
pub(crate) fn to_op_code_operand_kind(value: &str) -> Result<OpCodeOperandKind, String> {
let value = value.trim();
match TO_OP_CODE_OPERAND_KIND_HASH.get(value) {
Some(op_code_operand_kind) => Ok(*op_code_operand_kind),
None => Err(format!("Invalid OpCodeOperandKind value: {}", value)),
}
}
#[cfg(feature = "instr_info")]
pub(crate) fn to_condition_code(value: &str) -> Result<ConditionCode, String> {
let value = value.trim();
match TO_CONDITION_CODE_HASH.get(value) {
Some(condition_code) => Ok(*condition_code),
None => Err(format!("Invalid ConditionCode value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_options_props(value: &str) -> Result<OptionsProps, String> {
let value = value.trim();
match TO_OPTIONS_PROPS_HASH.get(value) {
Some(options_props) => Ok(*options_props),
None => Err(format!("Invalid OptionsProps value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_memory_size_options(value: &str) -> Result<MemorySizeOptions, String> {
let value = value.trim();
match TO_MEMORY_SIZE_OPTIONS_HASH.get(value) {
Some(memory_size_options) => Ok(*memory_size_options),
None => Err(format!("Invalid MemorySizeOptions value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_number_base(value: &str) -> Result<NumberBase, String> {
let value = value.trim();
match TO_NUMBER_BASE_HASH.get(value) {
Some(number_base) => Ok(*number_base),
None => Err(format!("Invalid NumberBase value: {}", value)),
}
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn number_base_len() -> usize {
TO_NUMBER_BASE_HASH.len()
}
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub(crate) fn to_boolean(value: &str) -> Result<bool, String> {
let value = value.trim();
match value {
"false" => Ok(false),
"true" => Ok(true),
_ => Err(format!("Invalid boolean value: {}", value)),
}
} |
pub(crate) fn to_u64(value: &str) -> Result<u64, String> {
let value = value.trim(); |
socket.rs | use crate::{
sinks::util::tcp::{Encoding, TcpSinkConfig, TlsConfig},
topology::config::{DataType, SinkConfig, SinkContext, SinkDescription},
};
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize, Debug)]
// TODO: add back when serde-rs/serde#1358 is addressed
// #[serde(deny_unknown_fields)]
pub struct | {
#[serde(flatten)]
pub mode: Mode,
}
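// serde's internal tagging below maps the config's `mode = "tcp"` key onto the
// matching transport variant.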
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(tag = "mode", rename_all = "snake_case")]
pub enum Mode {
Tcp(TcpSinkConfig),
}
inventory::submit! {
SinkDescription::new_without_default::<SocketSinkConfig>("socket")
}
impl SocketSinkConfig {
pub fn make_tcp_config(address: String, encoding: Encoding, tls: Option<TlsConfig>) -> Self {
TcpSinkConfig {
address,
encoding,
tls,
}
.into()
}
pub fn make_basic_tcp_config(address: String) -> Self {
TcpSinkConfig::new(address).into()
}
}
impl From<TcpSinkConfig> for SocketSinkConfig {
fn from(config: TcpSinkConfig) -> Self {
Self {
mode: Mode::Tcp(config),
}
}
}
#[typetag::serde(name = "socket")]
impl SinkConfig for SocketSinkConfig {
fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {
match &self.mode {
Mode::Tcp(config) => config.build(cx),
}
}
fn input_type(&self) -> DataType {
DataType::Log
}
fn sink_type(&self) -> &'static str {
"socket"
}
}
| SocketSinkConfig |
app.ts | import {Component} from "angular2/core";
import {SideMenu} from "./sideMenu.component.ts";
import {TopBar} from "./topBar.component.ts";
import {RouteConfig, RouterLink} from "angular2/router";
import {MainView} from "./mainView.component";
import {EventsHome} from "./../../events/components/eventsHome.component";
import {FriendsHome} from "./../../friends/components/friendsHome.component";
import {ChargesHome} from "./../../charges/components/chargesHome.component";
require("../../../assets/surface-1.01/prod/css/surface_styles.css");
require("../../../assets/sass/style.scss");
@Component({
selector: 'main-app',
template: require('./app.html'),
directives: [SideMenu, TopBar, MainView, RouterLink]
})
@RouteConfig([
{
path: '/events/...', name: 'EventsHome', component: EventsHome, useAsDefault: true
},
{
path: '/friends/...', name: 'FriendsHome', component: FriendsHome
},
{
path: '/charges/...', name: 'ChargesHome', component: ChargesHome
}])
export class MainApp { | } | constructor(){
} |
0003_auto_20190709_2301.py | # Generated by Django 2.1.7 on 2019-07-09 23:01
from django.db import migrations, models
class Migration(migrations.Migration):
|
operations = [
migrations.AlterField(
model_name='timeseries',
name='begin_date',
field=models.DateField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='timeseries',
name='end_date',
field=models.DateField(blank=True, default=None, null=True),
),
] | dependencies = [
('hydroserver_core', '0002_auto_20190709_2226'),
] |
map.js | !function(e){var r={};function t(n){if(r[n])return r[n].exports;var a=r[n]={i:n,l:!1,exports:{}};return e[n].call(a.exports,a,a.exports,t),a.l=!0,a.exports}t.m=e,t.c=r,t.d=function(e,r,n){t.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:n})},t.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},t.t=function(e,r){if(1&r&&(e=t(e)),8&r)return e;if(4&r&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(t.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var a in e)t.d(n,a,function(r){return e[r]}.bind(null,a));return n},t.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return t.d(r,"a",r),r},t.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},t.p="/",t(t.s=1)}({1:function(e,r,t){e.exports=t("mUkR")},"8oxB":function(e,r){var t,n,a=e.exports={};function | (){throw new Error("setTimeout has not been defined")}function o(){throw new Error("clearTimeout has not been defined")}function s(e){if(t===setTimeout)return setTimeout(e,0);if((t===i||!t)&&setTimeout)return t=setTimeout,setTimeout(e,0);try{return t(e,0)}catch(r){try{return t.call(null,e,0)}catch(r){return t.call(this,e,0)}}}!function(){try{t="function"==typeof setTimeout?setTimeout:i}catch(e){t=i}try{n="function"==typeof clearTimeout?clearTimeout:o}catch(e){n=o}}();var l,c=[],u=!1,p=-1;function d(){u&&l&&(u=!1,l.length?c=l.concat(c):p=-1,c.length&&f())}function f(){if(!u){var e=s(d);u=!0;for(var r=c.length;r;){for(l=c,c=[];++p<r;)l&&l[p].run();p=-1,r=c.length}l=null,u=!1,function(e){if(n===clearTimeout)return clearTimeout(e);if((n===o||!n)&&clearTimeout)return n=clearTimeout,clearTimeout(e);try{n(e)}catch(r){try{return n.call(null,e)}catch(r){return n.call(this,e)}}}(e)}}function m(e,r){this.fun=e,this.array=r}function g(){}a.nextTick=function(e){var r=new Array(arguments.length-1);if(arguments.length>1)for(var t=1;t<arguments.length;t++)r[t-1]=arguments[t];c.push(new m(e,r)),1!==c.length||u||s(f)},m.prototype.run=function(){this.fun.apply(null,this.array)},a.title="browser",a.browser=!0,a.env={},a.argv=[],a.version="",a.versions={},a.on=g,a.addListener=g,a.once=g,a.off=g,a.removeListener=g,a.removeAllListeners=g,a.emit=g,a.prependListener=g,a.prependOnceListener=g,a.listeners=function(e){return[]},a.binding=function(e){throw new Error("process.binding is not supported")},a.cwd=function(){return"/"},a.chdir=function(e){throw new Error("process.chdir is not supported")},a.umask=function(){return 0}},"9tPo":function(e,r){e.exports=function(e){var r="undefined"!=typeof window&&window.location;if(!r)throw new Error("fixUrls requires window.location");if(!e||"string"!=typeof e)return e;var t=r.protocol+"//"+r.host,n=t+r.pathname.replace(/\/[^\/]*$/,"/");return e.replace(/url\s*\(((?:[^)(]|\((?:[^)(]+|\([^)(]*\))*\))*)\)/gi,(function(e,r){var a,i=r.trim().replace(/^"(.*)"$/,(function(e,r){return r})).replace(/^'(.*)'$/,(function(e,r){return r}));return/^(#|data:|http:\/\/|https:\/\/|file:\/\/\/|\s*$)/i.test(i)?e:(a=0===i.indexOf("//")?i:0===i.indexOf("/")?t+i:n+i.replace(/^\.\//,""),"url("+JSON.stringify(a)+")")}))}},I1BE:function(e,r){e.exports=function(e){var r=[];return r.toString=function(){return this.map((function(r){var t=function(e,r){var t=e[1]||"",n=e[3];if(!n)return t;if(r&&"function"==typeof btoa){var a=(o=n,"/*# 
sourceMappingURL=data:application/json;charset=utf-8;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(o))))+" */"),i=n.sources.map((function(e){return"/*# sourceURL="+n.sourceRoot+e+" */"}));return[t].concat(i).concat([a]).join("\n")}var o;return[t].join("\n")}(r,e);return r[2]?"@media "+r[2]+"{"+t+"}":t})).join("")},r.i=function(e,t){"string"==typeof e&&(e=[[null,e,""]]);for(var n={},a=0;a<this.length;a++){var i=this[a][0];"number"==typeof i&&(n[i]=!0)}for(a=0;a<e.length;a++){var o=e[a];"number"==typeof o[0]&&n[o[0]]||(t&&!o[2]?o[2]=t:t&&(o[2]="("+o[2]+") and ("+t+")"),r.push(o))}},r}},PMrG:function(e,r,t){(e.exports=t("I1BE")(!1)).push([e.i,"/**\n * Swiper 6.3.3\n * Most modern mobile touch slider and framework with hardware accelerated transitions\n * http://swiperjs.com\n *\n * Copyright 2014-2020 Vladimir Kharlampidi\n *\n * Released under the MIT License\n *\n * Released on: October 9, 2020\n */\n\n@font-face {\n font-family: 'swiper-icons';\n src: url('data:application/font-woff;charset=utf-8;base64, d09GRgABAAAAAAZgABAAAAAADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAAGRAAAABoAAAAci6qHkUdERUYAAAWgAAAAIwAAACQAYABXR1BPUwAABhQAAAAuAAAANuAY7+xHU1VCAAAFxAAAAFAAAABm2fPczU9TLzIAAAHcAAAASgAAAGBP9V5RY21hcAAAAkQAAACIAAABYt6F0cBjdnQgAAACzAAAAAQAAAAEABEBRGdhc3AAAAWYAAAACAAAAAj//wADZ2x5ZgAAAywAAADMAAAD2MHtryVoZWFkAAABbAAAADAAAAA2E2+eoWhoZWEAAAGcAAAAHwAAACQC9gDzaG10eAAAAigAAAAZAAAArgJkABFsb2NhAAAC0AAAAFoAAABaFQAUGG1heHAAAAG8AAAAHwAAACAAcABAbmFtZQAAA/gAAAE5AAACXvFdBwlwb3N0AAAFNAAAAGIAAACE5s74hXjaY2BkYGAAYpf5Hu/j+W2+MnAzMYDAzaX6QjD6/4//Bxj5GA8AuRwMYGkAPywL13jaY2BkYGA88P8Agx4j+/8fQDYfA1AEBWgDAIB2BOoAeNpjYGRgYNBh4GdgYgABEMnIABJzYNADCQAACWgAsQB42mNgYfzCOIGBlYGB0YcxjYGBwR1Kf2WQZGhhYGBiYGVmgAFGBiQQkOaawtDAoMBQxXjg/wEGPcYDDA4wNUA2CCgwsAAAO4EL6gAAeNpj2M0gyAACqxgGNWBkZ2D4/wMA+xkDdgAAAHjaY2BgYGaAYBkGRgYQiAHyGMF8FgYHIM3DwMHABGQrMOgyWDLEM1T9/w8UBfEMgLzE////P/5//f/V/xv+r4eaAAeMbAxwIUYmIMHEgKYAYjUcsDAwsLKxc3BycfPw8jEQA/gZBASFhEVExcQlJKWkZWTl5BUUlZRVVNXUNTQZBgMAAMR+E+gAEQFEAAAAKgAqACoANAA+AEgAUgBcAGYAcAB6AIQAjgCYAKIArAC2AMAAygDUAN4A6ADyAPwBBgEQARoBJAEuATgBQgFMAVYBYAFqAXQBfgGIAZIBnAGmAbIBzgHsAAB42u2NMQ6CUAyGW568x9AneYYgm4MJbhKFaExIOAVX8ApewSt4Bic4AfeAid3VOBixDxfPYEza5O+Xfi04YADggiUIULCuEJK8VhO4bSvpdnktHI5QCYtdi2sl8ZnXaHlqUrNKzdKcT8cjlq+rwZSvIVczNiezsfnP/uznmfPFBNODM2K7MTQ45YEAZqGP81AmGGcF3iPqOop0r1SPTaTbVkfUe4HXj97wYE+yNwWYxwWu4v1ugWHgo3S1XdZEVqWM7ET0cfnLGxWfkgR42o2PvWrDMBSFj/IHLaF0zKjRgdiVMwScNRAoWUoH78Y2icB/yIY09An6AH2Bdu/UB+yxopYshQiEvnvu0dURgDt8QeC8PDw7Fpji3fEA4z/PEJ6YOB5hKh4dj3EvXhxPqH/SKUY3rJ7srZ4FZnh1PMAtPhwP6fl2PMJMPDgeQ4rY8YT6Gzao0eAEA409DuggmTnFnOcSCiEiLMgxCiTI6Cq5DZUd3Qmp10vO0LaLTd2cjN4fOumlc7lUYbSQcZFkutRG7g6JKZKy0RmdLY680CDnEJ+UMkpFFe1RN7nxdVpXrC4aTtnaurOnYercZg2YVmLN/d/gczfEimrE/fs/bOuq29Zmn8tloORaXgZgGa78yO9/cnXm2BpaGvq25Dv9S4E9+5SIc9PqupJKhYFSSl47+Qcr1mYNAAAAeNptw0cKwkAAAMDZJA8Q7OUJvkLsPfZ6zFVERPy8qHh2YER+3i/BP83vIBLLySsoKimrqKqpa2hp6+jq6RsYGhmbmJqZSy0sraxtbO3sHRydnEMU4uR6yx7JJXveP7WrDycAAAAAAAH//wACeNpjYGRgYOABYhkgZgJCZgZNBkYGLQZtIJsFLMYAAAw3ALgAeNolizEKgDAQBCchRbC2sFER0YD6qVQiBCv/H9ezGI6Z5XBAw8CBK/m5iQQVauVbXLnOrMZv2oLdKFa8Pjuru2hJzGabmOSLzNMzvutpB3N42mNgZGBg4GKQYzBhYMxJLMlj4GBgAYow/P/PAJJhLM6sSoWKfWCAAwDAjgbRAAB42mNgYGBkAIIbCZo5IPrmUn0hGA0AO8EFTQAA') format('woff');\n font-weight: 400;\n font-style: normal;\n}\n:root {\n --swiper-theme-color: #007aff;\n}\n.swiper-container {\n margin-left: auto;\n margin-right: auto;\n position: relative;\n overflow: hidden;\n list-style: none;\n padding: 0;\n /* Fix of Webkit flickering */\n z-index: 1;\n}\n.swiper-container-vertical > .swiper-wrapper 
{\n -webkit-box-orient: vertical;\n -webkit-box-direction: normal;\n flex-direction: column;\n}\n.swiper-wrapper {\n position: relative;\n width: 100%;\n height: 100%;\n z-index: 1;\n display: -webkit-box;\n display: flex;\n -webkit-transition-property: -webkit-transform;\n transition-property: -webkit-transform;\n transition-property: transform;\n transition-property: transform, -webkit-transform;\n box-sizing: content-box;\n}\n.swiper-container-android .swiper-slide,\n.swiper-wrapper {\n -webkit-transform: translate3d(0px, 0, 0);\n transform: translate3d(0px, 0, 0);\n}\n.swiper-container-multirow > .swiper-wrapper {\n flex-wrap: wrap;\n}\n.swiper-container-multirow-column > .swiper-wrapper {\n flex-wrap: wrap;\n -webkit-box-orient: vertical;\n -webkit-box-direction: normal;\n flex-direction: column;\n}\n.swiper-container-free-mode > .swiper-wrapper {\n -webkit-transition-timing-function: ease-out;\n transition-timing-function: ease-out;\n margin: 0 auto;\n}\n.swiper-slide {\n flex-shrink: 0;\n width: 100%;\n height: 100%;\n position: relative;\n -webkit-transition-property: -webkit-transform;\n transition-property: -webkit-transform;\n transition-property: transform;\n transition-property: transform, -webkit-transform;\n}\n.swiper-slide-invisible-blank {\n visibility: hidden;\n}\n/* Auto Height */\n.swiper-container-autoheight,\n.swiper-container-autoheight .swiper-slide {\n height: auto;\n}\n.swiper-container-autoheight .swiper-wrapper {\n -webkit-box-align: start;\n align-items: flex-start;\n -webkit-transition-property: height, -webkit-transform;\n transition-property: height, -webkit-transform;\n transition-property: transform, height;\n transition-property: transform, height, -webkit-transform;\n}\n/* 3D Effects */\n.swiper-container-3d {\n -webkit-perspective: 1200px;\n perspective: 1200px;\n}\n.swiper-container-3d .swiper-wrapper,\n.swiper-container-3d .swiper-slide,\n.swiper-container-3d .swiper-slide-shadow-left,\n.swiper-container-3d .swiper-slide-shadow-right,\n.swiper-container-3d .swiper-slide-shadow-top,\n.swiper-container-3d .swiper-slide-shadow-bottom,\n.swiper-container-3d .swiper-cube-shadow {\n -webkit-transform-style: preserve-3d;\n transform-style: preserve-3d;\n}\n.swiper-container-3d .swiper-slide-shadow-left,\n.swiper-container-3d .swiper-slide-shadow-right,\n.swiper-container-3d .swiper-slide-shadow-top,\n.swiper-container-3d .swiper-slide-shadow-bottom {\n position: absolute;\n left: 0;\n top: 0;\n width: 100%;\n height: 100%;\n pointer-events: none;\n z-index: 10;\n}\n.swiper-container-3d .swiper-slide-shadow-left {\n background-image: -webkit-gradient(linear, right top, left top, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0)));\n background-image: linear-gradient(to left, rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 0));\n}\n.swiper-container-3d .swiper-slide-shadow-right {\n background-image: -webkit-gradient(linear, left top, right top, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0)));\n background-image: linear-gradient(to right, rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 0));\n}\n.swiper-container-3d .swiper-slide-shadow-top {\n background-image: -webkit-gradient(linear, left bottom, left top, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0)));\n background-image: linear-gradient(to top, rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 0));\n}\n.swiper-container-3d .swiper-slide-shadow-bottom {\n background-image: -webkit-gradient(linear, left top, left bottom, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0)));\n background-image: linear-gradient(to bottom, rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 
0));\n}\n/* CSS Mode */\n.swiper-container-css-mode > .swiper-wrapper {\n overflow: auto;\n scrollbar-width: none;\n /* For Firefox */\n -ms-overflow-style: none;\n /* For Internet Explorer and Edge */\n}\n.swiper-container-css-mode > .swiper-wrapper::-webkit-scrollbar {\n display: none;\n}\n.swiper-container-css-mode > .swiper-wrapper > .swiper-slide {\n scroll-snap-align: start start;\n}\n.swiper-container-horizontal.swiper-container-css-mode > .swiper-wrapper {\n -ms-scroll-snap-type: x mandatory;\n scroll-snap-type: x mandatory;\n}\n.swiper-container-vertical.swiper-container-css-mode > .swiper-wrapper {\n -ms-scroll-snap-type: y mandatory;\n scroll-snap-type: y mandatory;\n}\n:root {\n --swiper-navigation-size: 44px;\n /*\n --swiper-navigation-color: var(--swiper-theme-color);\n */\n}\n.swiper-button-prev,\n.swiper-button-next {\n position: absolute;\n top: 50%;\n width: calc(var(--swiper-navigation-size) / 44 * 27);\n height: var(--swiper-navigation-size);\n margin-top: calc(-1 * var(--swiper-navigation-size) / 2);\n z-index: 10;\n cursor: pointer;\n display: -webkit-box;\n display: flex;\n -webkit-box-align: center;\n align-items: center;\n -webkit-box-pack: center;\n justify-content: center;\n color: var(--swiper-navigation-color, var(--swiper-theme-color));\n}\n.swiper-button-prev.swiper-button-disabled,\n.swiper-button-next.swiper-button-disabled {\n opacity: 0.35;\n cursor: auto;\n pointer-events: none;\n}\n.swiper-button-prev:after,\n.swiper-button-next:after {\n font-family: swiper-icons;\n font-size: var(--swiper-navigation-size);\n text-transform: none !important;\n letter-spacing: 0;\n text-transform: none;\n font-variant: initial;\n line-height: 1;\n}\n.swiper-button-prev,\n.swiper-container-rtl .swiper-button-next {\n left: 10px;\n right: auto;\n}\n.swiper-button-prev:after,\n.swiper-container-rtl .swiper-button-next:after {\n content: 'prev';\n}\n.swiper-button-next,\n.swiper-container-rtl .swiper-button-prev {\n right: 10px;\n left: auto;\n}\n.swiper-button-next:after,\n.swiper-container-rtl .swiper-button-prev:after {\n content: 'next';\n}\n.swiper-button-prev.swiper-button-white,\n.swiper-button-next.swiper-button-white {\n --swiper-navigation-color: #ffffff;\n}\n.swiper-button-prev.swiper-button-black,\n.swiper-button-next.swiper-button-black {\n --swiper-navigation-color: #000000;\n}\n.swiper-button-lock {\n display: none;\n}\n:root {\n /*\n --swiper-pagination-color: var(--swiper-theme-color);\n */\n}\n.swiper-pagination {\n position: absolute;\n text-align: center;\n -webkit-transition: 300ms opacity;\n transition: 300ms opacity;\n -webkit-transform: translate3d(0, 0, 0);\n transform: translate3d(0, 0, 0);\n z-index: 10;\n}\n.swiper-pagination.swiper-pagination-hidden {\n opacity: 0;\n}\n/* Common Styles */\n.swiper-pagination-fraction,\n.swiper-pagination-custom,\n.swiper-container-horizontal > .swiper-pagination-bullets {\n bottom: 10px;\n left: 0;\n width: 100%;\n}\n/* Bullets */\n.swiper-pagination-bullets-dynamic {\n overflow: hidden;\n font-size: 0;\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet {\n -webkit-transform: scale(0.33);\n transform: scale(0.33);\n position: relative;\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active {\n -webkit-transform: scale(1);\n transform: scale(1);\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active-main {\n -webkit-transform: scale(1);\n transform: scale(1);\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active-prev {\n -webkit-transform: 
scale(0.66);\n transform: scale(0.66);\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active-prev-prev {\n -webkit-transform: scale(0.33);\n transform: scale(0.33);\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active-next {\n -webkit-transform: scale(0.66);\n transform: scale(0.66);\n}\n.swiper-pagination-bullets-dynamic .swiper-pagination-bullet-active-next-next {\n -webkit-transform: scale(0.33);\n transform: scale(0.33);\n}\n.swiper-pagination-bullet {\n width: 8px;\n height: 8px;\n display: inline-block;\n border-radius: 100%;\n background: #000;\n opacity: 0.2;\n}\nbutton.swiper-pagination-bullet {\n border: none;\n margin: 0;\n padding: 0;\n box-shadow: none;\n -webkit-appearance: none;\n -moz-appearance: none;\n appearance: none;\n}\n.swiper-pagination-clickable .swiper-pagination-bullet {\n cursor: pointer;\n}\n.swiper-pagination-bullet-active {\n opacity: 1;\n background: var(--swiper-pagination-color, var(--swiper-theme-color));\n}\n.swiper-container-vertical > .swiper-pagination-bullets {\n right: 10px;\n top: 50%;\n -webkit-transform: translate3d(0px, -50%, 0);\n transform: translate3d(0px, -50%, 0);\n}\n.swiper-container-vertical > .swiper-pagination-bullets .swiper-pagination-bullet {\n margin: 6px 0;\n display: block;\n}\n.swiper-container-vertical > .swiper-pagination-bullets.swiper-pagination-bullets-dynamic {\n top: 50%;\n -webkit-transform: translateY(-50%);\n transform: translateY(-50%);\n width: 8px;\n}\n.swiper-container-vertical > .swiper-pagination-bullets.swiper-pagination-bullets-dynamic .swiper-pagination-bullet {\n display: inline-block;\n -webkit-transition: 200ms top, 200ms -webkit-transform;\n transition: 200ms top, 200ms -webkit-transform;\n transition: 200ms transform, 200ms top;\n transition: 200ms transform, 200ms top, 200ms -webkit-transform;\n}\n.swiper-container-horizontal > .swiper-pagination-bullets .swiper-pagination-bullet {\n margin: 0 4px;\n}\n.swiper-container-horizontal > .swiper-pagination-bullets.swiper-pagination-bullets-dynamic {\n left: 50%;\n -webkit-transform: translateX(-50%);\n transform: translateX(-50%);\n white-space: nowrap;\n}\n.swiper-container-horizontal > .swiper-pagination-bullets.swiper-pagination-bullets-dynamic .swiper-pagination-bullet {\n -webkit-transition: 200ms left, 200ms -webkit-transform;\n transition: 200ms left, 200ms -webkit-transform;\n transition: 200ms transform, 200ms left;\n transition: 200ms transform, 200ms left, 200ms -webkit-transform;\n}\n.swiper-container-horizontal.swiper-container-rtl > .swiper-pagination-bullets-dynamic .swiper-pagination-bullet {\n -webkit-transition: 200ms right, 200ms -webkit-transform;\n transition: 200ms right, 200ms -webkit-transform;\n transition: 200ms transform, 200ms right;\n transition: 200ms transform, 200ms right, 200ms -webkit-transform;\n}\n/* Progress */\n.swiper-pagination-progressbar {\n background: rgba(0, 0, 0, 0.25);\n position: absolute;\n}\n.swiper-pagination-progressbar .swiper-pagination-progressbar-fill {\n background: var(--swiper-pagination-color, var(--swiper-theme-color));\n position: absolute;\n left: 0;\n top: 0;\n width: 100%;\n height: 100%;\n -webkit-transform: scale(0);\n transform: scale(0);\n -webkit-transform-origin: left top;\n transform-origin: left top;\n}\n.swiper-container-rtl .swiper-pagination-progressbar .swiper-pagination-progressbar-fill {\n -webkit-transform-origin: right top;\n transform-origin: right top;\n}\n.swiper-container-horizontal > 
.swiper-pagination-progressbar,\n.swiper-container-vertical > .swiper-pagination-progressbar.swiper-pagination-progressbar-opposite {\n width: 100%;\n height: 4px;\n left: 0;\n top: 0;\n}\n.swiper-container-vertical > .swiper-pagination-progressbar,\n.swiper-container-horizontal > .swiper-pagination-progressbar.swiper-pagination-progressbar-opposite {\n width: 4px;\n height: 100%;\n left: 0;\n top: 0;\n}\n.swiper-pagination-white {\n --swiper-pagination-color: #ffffff;\n}\n.swiper-pagination-black {\n --swiper-pagination-color: #000000;\n}\n.swiper-pagination-lock {\n display: none;\n}\n/* Scrollbar */\n.swiper-scrollbar {\n border-radius: 10px;\n position: relative;\n -ms-touch-action: none;\n background: rgba(0, 0, 0, 0.1);\n}\n.swiper-container-horizontal > .swiper-scrollbar {\n position: absolute;\n left: 1%;\n bottom: 3px;\n z-index: 50;\n height: 5px;\n width: 98%;\n}\n.swiper-container-vertical > .swiper-scrollbar {\n position: absolute;\n right: 3px;\n top: 1%;\n z-index: 50;\n width: 5px;\n height: 98%;\n}\n.swiper-scrollbar-drag {\n height: 100%;\n width: 100%;\n position: relative;\n background: rgba(0, 0, 0, 0.5);\n border-radius: 10px;\n left: 0;\n top: 0;\n}\n.swiper-scrollbar-cursor-drag {\n cursor: move;\n}\n.swiper-scrollbar-lock {\n display: none;\n}\n.swiper-zoom-container {\n width: 100%;\n height: 100%;\n display: -webkit-box;\n display: flex;\n -webkit-box-pack: center;\n justify-content: center;\n -webkit-box-align: center;\n align-items: center;\n text-align: center;\n}\n.swiper-zoom-container > img,\n.swiper-zoom-container > svg,\n.swiper-zoom-container > canvas {\n max-width: 100%;\n max-height: 100%;\n -o-object-fit: contain;\n object-fit: contain;\n}\n.swiper-slide-zoomed {\n cursor: move;\n}\n/* Preloader */\n:root {\n /*\n --swiper-preloader-color: var(--swiper-theme-color);\n */\n}\n.swiper-lazy-preloader {\n width: 42px;\n height: 42px;\n position: absolute;\n left: 50%;\n top: 50%;\n margin-left: -21px;\n margin-top: -21px;\n z-index: 10;\n -webkit-transform-origin: 50%;\n transform-origin: 50%;\n -webkit-animation: swiper-preloader-spin 1s infinite linear;\n animation: swiper-preloader-spin 1s infinite linear;\n box-sizing: border-box;\n border: 4px solid var(--swiper-preloader-color, var(--swiper-theme-color));\n border-radius: 50%;\n border-top-color: transparent;\n}\n.swiper-lazy-preloader-white {\n --swiper-preloader-color: #fff;\n}\n.swiper-lazy-preloader-black {\n --swiper-preloader-color: #000;\n}\n@-webkit-keyframes swiper-preloader-spin {\n 100% {\n -webkit-transform: rotate(360deg);\n transform: rotate(360deg);\n }\n}\n@keyframes swiper-preloader-spin {\n 100% {\n -webkit-transform: rotate(360deg);\n transform: rotate(360deg);\n }\n}\n/* a11y */\n.swiper-container .swiper-notification {\n position: absolute;\n left: 0;\n top: 0;\n pointer-events: none;\n opacity: 0;\n z-index: -1000;\n}\n.swiper-container-fade.swiper-container-free-mode .swiper-slide {\n -webkit-transition-timing-function: ease-out;\n transition-timing-function: ease-out;\n}\n.swiper-container-fade .swiper-slide {\n pointer-events: none;\n -webkit-transition-property: opacity;\n transition-property: opacity;\n}\n.swiper-container-fade .swiper-slide .swiper-slide {\n pointer-events: none;\n}\n.swiper-container-fade .swiper-slide-active,\n.swiper-container-fade .swiper-slide-active .swiper-slide-active {\n pointer-events: auto;\n}\n.swiper-container-cube {\n overflow: visible;\n}\n.swiper-container-cube .swiper-slide {\n pointer-events: none;\n -webkit-backface-visibility: 
hidden;\n backface-visibility: hidden;\n z-index: 1;\n visibility: hidden;\n -webkit-transform-origin: 0 0;\n transform-origin: 0 0;\n width: 100%;\n height: 100%;\n}\n.swiper-container-cube .swiper-slide .swiper-slide {\n pointer-events: none;\n}\n.swiper-container-cube.swiper-container-rtl .swiper-slide {\n -webkit-transform-origin: 100% 0;\n transform-origin: 100% 0;\n}\n.swiper-container-cube .swiper-slide-active,\n.swiper-container-cube .swiper-slide-active .swiper-slide-active {\n pointer-events: auto;\n}\n.swiper-container-cube .swiper-slide-active,\n.swiper-container-cube .swiper-slide-next,\n.swiper-container-cube .swiper-slide-prev,\n.swiper-container-cube .swiper-slide-next + .swiper-slide {\n pointer-events: auto;\n visibility: visible;\n}\n.swiper-container-cube .swiper-slide-shadow-top,\n.swiper-container-cube .swiper-slide-shadow-bottom,\n.swiper-container-cube .swiper-slide-shadow-left,\n.swiper-container-cube .swiper-slide-shadow-right {\n z-index: 0;\n -webkit-backface-visibility: hidden;\n backface-visibility: hidden;\n}\n.swiper-container-cube .swiper-cube-shadow {\n position: absolute;\n left: 0;\n bottom: 0px;\n width: 100%;\n height: 100%;\n background: #000;\n opacity: 0.6;\n -webkit-filter: blur(50px);\n filter: blur(50px);\n z-index: 0;\n}\n.swiper-container-flip {\n overflow: visible;\n}\n.swiper-container-flip .swiper-slide {\n pointer-events: none;\n -webkit-backface-visibility: hidden;\n backface-visibility: hidden;\n z-index: 1;\n}\n.swiper-container-flip .swiper-slide .swiper-slide {\n pointer-events: none;\n}\n.swiper-container-flip .swiper-slide-active,\n.swiper-container-flip .swiper-slide-active .swiper-slide-active {\n pointer-events: auto;\n}\n.swiper-container-flip .swiper-slide-shadow-top,\n.swiper-container-flip .swiper-slide-shadow-bottom,\n.swiper-container-flip .swiper-slide-shadow-left,\n.swiper-container-flip .swiper-slide-shadow-right {\n z-index: 0;\n -webkit-backface-visibility: hidden;\n backface-visibility: hidden;\n}\n",""])},URgk:function(e,r,t){(function(e){var n=void 0!==e&&e||"undefined"!=typeof self&&self||window,a=Function.prototype.apply;function i(e,r){this._id=e,this._clearFn=r}r.setTimeout=function(){return new i(a.call(setTimeout,n,arguments),clearTimeout)},r.setInterval=function(){return new i(a.call(setInterval,n,arguments),clearInterval)},r.clearTimeout=r.clearInterval=function(e){e&&e.close()},i.prototype.unref=i.prototype.ref=function(){},i.prototype.close=function(){this._clearFn.call(n,this._id)},r.enroll=function(e,r){clearTimeout(e._idleTimeoutId),e._idleTimeout=r},r.unenroll=function(e){clearTimeout(e._idleTimeoutId),e._idleTimeout=-1},r._unrefActive=r.active=function(e){clearTimeout(e._idleTimeoutId);var r=e._idleTimeout;r>=0&&(e._idleTimeoutId=setTimeout((function(){e._onTimeout&&e._onTimeout()}),r))},t("YBdB"),r.setImmediate="undefined"!=typeof self&&self.setImmediate||void 0!==e&&e.setImmediate||this&&this.setImmediate,r.clearImmediate="undefined"!=typeof self&&self.clearImmediate||void 0!==e&&e.clearImmediate||this&&this.clearImmediate}).call(this,t("yLpj"))},XOgp:function(e,r,t){(function(e,t){(function(r){"use strict";var n=/\r?\n/g,a=/^\s*$/,i=/^(\r?\n)*[\t\s]/,o=function(e){if(!i.test(e))return e;for(var r,t,o,l=e.split(n),c=1/0,u=0;u<l.length;u++){var p=l[u];if(!a.test(p))if(r)(t=s(p,r))<c&&(c=t);else{if(" "!==(o=p.charAt(0))&&"\t"!==o)return e;(t=s(p,r=o))<c&&(c=t)}}return l.map((function(e){return e.slice(c)})).join("\n")};function s(e,r){for(var t=0;e.charAt(t)===r;)t++;return t}var 
l=Object.freeze({}),c=Object.prototype.toString;function u(e){return"[object Object]"===c.call(e)}function p(e,r){for(var t=Object.create(null),n=e.split(","),a=0;a<n.length;a++)t[n[a]]=!0;return r?function(e){return t[e.toLowerCase()]}:function(e){return t[e]}}var d=p("slot,component",!0);p("key,ref,slot,slot-scope,is");var f=Object.prototype.hasOwnProperty;function m(e,r){return f.call(e,r)}function g(e){var r=Object.create(null);return function(t){return r[t]||(r[t]=e(t))}}var h=/-(\w)/g,v=g((function(e){return e.replace(h,(function(e,r){return r?r.toUpperCase():""}))})),b=/\B([A-Z])/g,y=g((function(e){return e.replace(b,"-$1").toLowerCase()}));function w(e,r){for(var t in r)e[t]=r[t];return e}function A(e,r,t){}Function.prototype.bind;var x=function(e,r,t){return!1},k=p("area,base,br,col,embed,frame,hr,img,input,isindex,keygen,link,meta,param,source,track,wbr"),E=p("colgroup,dd,dt,li,options,p,td,tfoot,th,thead,tr,source"),q=p("address,article,aside,base,blockquote,body,caption,col,colgroup,dd,details,dialog,div,dl,dt,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,head,header,hgroup,hr,html,legend,li,menuitem,meta,optgroup,option,param,rp,rt,source,style,summary,tbody,td,tfoot,th,thead,title,tr,track");function D(e,r,t,n){Object.defineProperty(e,r,{value:t,enumerable:!!n,writable:!0,configurable:!0})}var L=/^\s*([^\s"'<>\/=]+)(?:\s*(=)\s*(?:"([^"]*)"+|'([^']*)'+|([^\s"'=<>`]+)))?/,S=/^\s*((?:v-[\w-]+:|@|:|#)\[[^=]+\][^\s"'<>\/=]*)(?:\s*(=)\s*(?:"([^"]*)"+|'([^']*)'+|([^\s"'=<>`]+)))?/,T="[a-zA-Z_][\\-\\.0-9_a-zA-Z"+/a-zA-Z\u00B7\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u037D\u037F-\u1FFF\u200C-\u200D\u203F-\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD/.source+"]*",C="((?:"+T+"\\:)?"+T+")",B=new RegExp("^<"+C),R=/^\s*(\/?)>/,F=new RegExp("^<\\/"+C+"[^>]*>"),N=/^<!DOCTYPE [^>]+>/i,O=/^<!\--/,U=/^<!\[/,I=p("script,style,textarea",!0),V={},_={"<":"<",">":">",""":'"',"&":"&"," ":"\n","	":"\t","'":"'"},M=/&(?:lt|gt|quot|amp|#39);/g,G=/&(?:lt|gt|quot|amp|#39|#10|#9);/g,P=p("pre,textarea",!0),j=function(e,r){return e&&P(e)&&"\n"===r[0]};function z(e,r){var t=r?G:M;return e.replace(t,(function(e){return _[e]}))}function H(e,r){for(var t,n,a=[],i=r.expectHTML,o=r.isUnaryTag||x,s=r.canBeLeftOpenTag||x,l=0;e;){if(t=e,n&&I(n)){var c=0,u=n.toLowerCase(),p=V[u]||(V[u]=new RegExp("([\\s\\S]*?)(</"+u+"[^>]*>)","i")),d=e.replace(p,(function(e,t,n){return c=n.length,I(u)||"noscript"===u||(t=t.replace(/<!\--([\s\S]*?)-->/g,"$1").replace(/<!\[CDATA\[([\s\S]*?)]]>/g,"$1")),j(u,t)&&(t=t.slice(1)),r.chars&&r.chars(t),""}));l+=e.length-d.length,e=d,C(u,l-c,l)}else{var f=e.indexOf("<");if(0===f){if(O.test(e)){var m=e.indexOf("--\x3e");if(m>=0){r.shouldKeepComment&&r.comment(e.substring(4,m),l,l+m+3),E(m+3);continue}}if(U.test(e)){var g=e.indexOf("]>");if(g>=0){E(g+2);continue}}var h=e.match(N);if(h){E(h[0].length);continue}var v=e.match(F);if(v){var b=l;E(v[0].length),C(v[1],b,l);continue}var y=D();if(y){T(y),j(y.tagName,e)&&E(1);continue}}var w=void 0,A=void 0,k=void 0;if(f>=0){for(A=e.slice(f);!(F.test(A)||B.test(A)||O.test(A)||U.test(A)||(k=A.indexOf("<",1))<0);)f+=k,A=e.slice(f);w=e.substring(0,f)}f<0&&(w=e),w&&E(w.length),r.chars&&w&&r.chars(w,l-w.length,l)}if(e===t){r.chars&&r.chars(e),!a.length&&r.warn&&r.warn('Mal-formatted tag at end of template: "'+e+'"',{start:l+e.length});break}}function E(r){l+=r,e=e.substring(r)}function D(){var r=e.match(B);if(r){var 
",nvap:"≍⃒",nvdash:"⊬",nvDash:"⊭",nVdash:"⊮",nVDash:"⊯",nvge:"≥⃒",nvgt:">⃒",nvHarr:"⤄",nvinfin:"⧞",nvlArr:"⤂",nvle:"≤⃒",nvlt:"<⃒",nvltrie:"⊴⃒",nvrArr:"⤃",nvrtrie:"⊵⃒",nvsim:"∼⃒",nwarhk:"⤣",nwarr:"↖",nwArr:"⇖",nwarrow:"↖",nwnear:"⤧",oacute:"ó",Oacute:"Ó",oast:"⊛",ocir:"⊚",ocirc:"ô",Ocirc:"Ô",ocy:"о",Ocy:"О",odash:"⊝",odblac:"ő",Odblac:"Ő",odiv:"⨸",odot:"⊙",odsold:"⦼",oelig:"œ",OElig:"Œ",ofcir:"⦿",ofr:"𝔬",Ofr:"𝔒",ogon:"˛",ograve:"ò",Ograve:"Ò",ogt:"⧁",ohbar:"⦵",ohm:"Ω",oint:"∮",olarr:"↺",olcir:"⦾",olcross:"⦻",oline:"‾",olt:"⧀",omacr:"ō",Omacr:"Ō",omega:"ω",Omega:"Ω",omicron:"ο",Omicron:"Ο",omid:"⦶",ominus:"⊖",oopf:"𝕠",Oopf:"𝕆",opar:"⦷",OpenCurlyDoubleQuote:"“",OpenCurlyQuote:"‘",operp:"⦹",oplus:"⊕",or:"∨",Or:"⩔",orarr:"↻",ord:"⩝",order:"ℴ",orderof:"ℴ",ordf:"ª",ordm:"º",origof:"⊶",oror:"⩖",orslope:"⩗",orv:"⩛",oS:"Ⓢ",oscr:"ℴ",Oscr:"𝒪",oslash:"ø",Oslash:"Ø",osol:"⊘",otilde:"õ",Otilde:"Õ",otimes:"⊗",Otimes:"⨷",otimesas:"⨶",ouml:"ö",Ouml:"Ö",ovbar:"⌽",OverBar:"‾",OverBrace:"⏞",OverBracket:"⎴",OverParenthesis:"⏜",par:"∥",para:"¶",parallel:"∥",parsim:"⫳",parsl:"⫽",part:"∂",PartialD:"∂",pcy:"п",Pcy:"П",percnt:"%",period:".",permil:"‰",perp:"⊥",pertenk:"‱",pfr:"𝔭",Pfr:"𝔓",phi:"φ",Phi:"Φ",phiv:"ϕ",phmmat:"ℳ",phone:"☎",pi:"π",Pi:"Π",pitchfork:"⋔",piv:"ϖ",planck:"ℏ",planckh:"ℎ",plankv:"ℏ",plus:"+",plusacir:"⨣",plusb:"⊞",pluscir:"⨢",plusdo:"∔",plusdu:"⨥",pluse:"⩲",PlusMinus:"±",plusmn:"±",plussim:"⨦",plustwo:"⨧",pm:"±",Poincareplane:"ℌ",pointint:"⨕",popf:"𝕡",Popf:"ℙ",pound:"£",pr:"≺",Pr:"⪻",prap:"⪷",prcue:"≼",pre:"⪯",prE:"⪳",prec:"≺",precapprox:"⪷",preccurlyeq:"≼",Precedes:"≺",PrecedesEqual:"⪯",PrecedesSlantEqual:"≼",PrecedesTilde:"≾",preceq:"⪯",precnapprox:"⪹",precneqq:"⪵",precnsim:"⋨",precsim:"≾",prime:"′",Prime:"″",primes:"ℙ",prnap:"⪹",prnE:"⪵",prnsim:"⋨",prod:"∏",Product:"∏",profalar:"⌮",profline:"⌒",profsurf:"⌓",prop:"∝",Proportion:"∷",Proportional:"∝",propto:"∝",prsim:"≾",prurel:"⊰",pscr:"𝓅",Pscr:"𝒫",psi:"ψ",Psi:"Ψ",puncsp:" 
",qfr:"𝔮",Qfr:"𝔔",qint:"⨌",qopf:"𝕢",Qopf:"ℚ",qprime:"⁗",qscr:"𝓆",Qscr:"𝒬",quaternions:"ℍ",quatint:"⨖",quest:"?",questeq:"≟",quot:'"',QUOT:'"',rAarr:"⇛",race:"∽̱",racute:"ŕ",Racute:"Ŕ",radic:"√",raemptyv:"⦳",rang:"⟩",Rang:"⟫",rangd:"⦒",range:"⦥",rangle:"⟩",raquo:"»",rarr:"→",rArr:"⇒",Rarr:"↠",rarrap:"⥵",rarrb:"⇥",rarrbfs:"⤠",rarrc:"⤳",rarrfs:"⤞",rarrhk:"↪",rarrlp:"↬",rarrpl:"⥅",rarrsim:"⥴",rarrtl:"↣",Rarrtl:"⤖",rarrw:"↝",ratail:"⤚",rAtail:"⤜",ratio:"∶",rationals:"ℚ",rbarr:"⤍",rBarr:"⤏",RBarr:"⤐",rbbrk:"❳",rbrace:"}",rbrack:"]",rbrke:"⦌",rbrksld:"⦎",rbrkslu:"⦐",rcaron:"ř",Rcaron:"Ř",rcedil:"ŗ",Rcedil:"Ŗ",rceil:"⌉",rcub:"}",rcy:"р",Rcy:"Р",rdca:"⤷",rdldhar:"⥩",rdquo:"”",rdquor:"”",rdsh:"↳",Re:"ℜ",real:"ℜ",realine:"ℛ",realpart:"ℜ",reals:"ℝ",rect:"▭",reg:"®",REG:"®",ReverseElement:"∋",ReverseEquilibrium:"⇋",ReverseUpEquilibrium:"⥯",rfisht:"⥽",rfloor:"⌋",rfr:"𝔯",Rfr:"ℜ",rHar:"⥤",rhard:"⇁",rharu:"⇀",rharul:"⥬",rho:"ρ",Rho:"Ρ",rhov:"ϱ",RightAngleBracket:"⟩",rightarrow:"→",Rightarrow:"⇒",RightArrow:"→",RightArrowBar:"⇥",RightArrowLeftArrow:"⇄",rightarrowtail:"↣",RightCeiling:"⌉",RightDoubleBracket:"⟧",RightDownTeeVector:"⥝",RightDownVector:"⇂",RightDownVectorBar:"⥕",RightFloor:"⌋",rightharpoondown:"⇁",rightharpoonup:"⇀",rightleftarrows:"⇄",rightleftharpoons:"⇌",rightrightarrows:"⇉",rightsquigarrow:"↝",RightTee:"⊢",RightTeeArrow:"↦",RightTeeVector:"⥛",rightthreetimes:"⋌",RightTriangle:"⊳",RightTriangleBar:"⧐",RightTriangleEqual:"⊵",RightUpDownVector:"⥏",RightUpTeeVector:"⥜",RightUpVector:"↾",RightUpVectorBar:"⥔",RightVector:"⇀",RightVectorBar:"⥓",ring:"˚",risingdotseq:"≓",rlarr:"⇄",rlhar:"⇌",rlm:"",rmoust:"⎱",rmoustache:"⎱",rnmid:"⫮",roang:"⟭",roarr:"⇾",robrk:"⟧",ropar:"⦆",ropf:"𝕣",Ropf:"ℝ",roplus:"⨮",rotimes:"⨵",RoundImplies:"⥰",rpar:")",rpargt:"⦔",rppolint:"⨒",rrarr:"⇉",Rrightarrow:"⇛",rsaquo:"›",rscr:"𝓇",Rscr:"ℛ",rsh:"↱",Rsh:"↱",rsqb:"]",rsquo:"’",rsquor:"’",rthree:"⋌",rtimes:"⋊",rtri:"▹",rtrie:"⊵",rtrif:"▸",rtriltri:"⧎",RuleDelayed:"⧴",ruluhar:"⥨",rx:"℞",sacute:"ś",Sacute:"Ś",sbquo:"‚",sc:"≻",Sc:"⪼",scap:"⪸",scaron:"š",Scaron:"Š",sccue:"≽",sce:"⪰",scE:"⪴",scedil:"ş",Scedil:"Ş",scirc:"ŝ",Scirc:"Ŝ",scnap:"⪺",scnE:"⪶",scnsim:"⋩",scpolint:"⨓",scsim:"≿",scy:"с",Scy:"С",sdot:"⋅",sdotb:"⊡",sdote:"⩦",searhk:"⤥",searr:"↘",seArr:"⇘",searrow:"↘",sect:"§",semi:";",seswar:"⤩",setminus:"∖",setmn:"∖",sext:"✶",sfr:"𝔰",Sfr:"𝔖",sfrown:"⌢",sharp:"♯",shchcy:"щ",SHCHcy:"Щ",shcy:"ш",SHcy:"Ш",ShortDownArrow:"↓",ShortLeftArrow:"←",shortmid:"∣",shortparallel:"∥",ShortRightArrow:"→",ShortUpArrow:"↑",shy:"",sigma:"σ",Sigma:"Σ",sigmaf:"ς",sigmav:"ς",sim:"∼",simdot:"⩪",sime:"≃",simeq:"≃",simg:"⪞",simgE:"⪠",siml:"⪝",simlE:"⪟",simne:"≆",simplus:"⨤",simrarr:"⥲",slarr:"←",SmallCircle:"∘",smallsetminus:"∖",smashp:"⨳",smeparsl:"⧤",smid:"∣",smile:"⌣",smt:"⪪",smte:"⪬",smtes:"⪬︀",softcy:"ь",SOFTcy:"Ь",sol:"/",solb:"⧄",solbar:"⌿",sopf:"𝕤",Sopf:"𝕊",spades:"♠",spadesuit:"♠",spar:"∥",sqcap:"⊓",sqcaps:"⊓︀",sqcup:"⊔",sqcups:"⊔︀",Sqrt:"√",sqsub:"⊏",sqsube:"⊑",sqsubset:"⊏",sqsubseteq:"⊑",sqsup:"⊐",sqsupe:"⊒",sqsupset:"⊐",sqsupseteq:"⊒",squ:"□",square:"□",Square:"□",SquareIntersection:"⊓",SquareSubset:"⊏",SquareSubsetEqual:"⊑",SquareSuperset:"⊐",SquareSupersetEqual:"⊒",SquareUnion:"⊔",squarf:"▪",squf:"▪",srarr:"→",sscr:"𝓈",Sscr:"𝒮",ssetmn:"∖",ssmile:"⌣",sstarf:"⋆",star:"☆",Star:"⋆",starf:"★",straightepsilon:"ϵ",straightphi:"ϕ",strns:"¯",sub:"⊂",Sub:"⋐",subdot:"⪽",sube:"⊆",subE:"⫅",subedot:"⫃",submult:"⫁",subne:"⊊",subnE:"⫋",subplus:"⪿",subrarr:"⥹",subset:"⊂",Subset:"⋐",subseteq:"⊆",subseteqq:"⫅",SubsetEqual:"⊆",subsetneq:"⊊",subsetneqq:"⫋",su
bsim:"⫇",subsub:"⫕",subsup:"⫓",succ:"≻",succapprox:"⪸",succcurlyeq:"≽",Succeeds:"≻",SucceedsEqual:"⪰",SucceedsSlantEqual:"≽",SucceedsTilde:"≿",succeq:"⪰",succnapprox:"⪺",succneqq:"⪶",succnsim:"⋩",succsim:"≿",SuchThat:"∋",sum:"∑",Sum:"∑",sung:"♪",sup:"⊃",Sup:"⋑",sup1:"¹",sup2:"²",sup3:"³",supdot:"⪾",supdsub:"⫘",supe:"⊇",supE:"⫆",supedot:"⫄",Superset:"⊃",SupersetEqual:"⊇",suphsol:"⟉",suphsub:"⫗",suplarr:"⥻",supmult:"⫂",supne:"⊋",supnE:"⫌",supplus:"⫀",supset:"⊃",Supset:"⋑",supseteq:"⊇",supseteqq:"⫆",supsetneq:"⊋",supsetneqq:"⫌",supsim:"⫈",supsub:"⫔",supsup:"⫖",swarhk:"⤦",swarr:"↙",swArr:"⇙",swarrow:"↙",swnwar:"⤪",szlig:"ß",Tab:"\t",target:"⌖",tau:"τ",Tau:"Τ",tbrk:"⎴",tcaron:"ť",Tcaron:"Ť",tcedil:"ţ",Tcedil:"Ţ",tcy:"т",Tcy:"Т",tdot:"⃛",telrec:"⌕",tfr:"𝔱",Tfr:"𝔗",there4:"∴",therefore:"∴",Therefore:"∴",theta:"θ",Theta:"Θ",thetasym:"ϑ",thetav:"ϑ",thickapprox:"≈",thicksim:"∼",ThickSpace:" ",thinsp:" ",ThinSpace:" ",thkap:"≈",thksim:"∼",thorn:"þ",THORN:"Þ",tilde:"˜",Tilde:"∼",TildeEqual:"≃",TildeFullEqual:"≅",TildeTilde:"≈",times:"×",timesb:"⊠",timesbar:"⨱",timesd:"⨰",tint:"∭",toea:"⤨",top:"⊤",topbot:"⌶",topcir:"⫱",topf:"𝕥",Topf:"𝕋",topfork:"⫚",tosa:"⤩",tprime:"‴",trade:"™",TRADE:"™",triangle:"▵",triangledown:"▿",triangleleft:"◃",trianglelefteq:"⊴",triangleq:"≜",triangleright:"▹",trianglerighteq:"⊵",tridot:"◬",trie:"≜",triminus:"⨺",TripleDot:"⃛",triplus:"⨹",trisb:"⧍",tritime:"⨻",trpezium:"⏢",tscr:"𝓉",Tscr:"𝒯",tscy:"ц",TScy:"Ц",tshcy:"ћ",TSHcy:"Ћ",tstrok:"ŧ",Tstrok:"Ŧ",twixt:"≬",twoheadleftarrow:"↞",twoheadrightarrow:"↠",uacute:"ú",Uacute:"Ú",uarr:"↑",uArr:"⇑",Uarr:"↟",Uarrocir:"⥉",ubrcy:"ў",Ubrcy:"Ў",ubreve:"ŭ",Ubreve:"Ŭ",ucirc:"û",Ucirc:"Û",ucy:"у",Ucy:"У",udarr:"⇅",udblac:"ű",Udblac:"Ű",udhar:"⥮",ufisht:"⥾",ufr:"𝔲",Ufr:"𝔘",ugrave:"ù",Ugrave:"Ù",uHar:"⥣",uharl:"↿",uharr:"↾",uhblk:"▀",ulcorn:"⌜",ulcorner:"⌜",ulcrop:"⌏",ultri:"◸",umacr:"ū",Umacr:"Ū",uml:"¨",UnderBar:"_",UnderBrace:"⏟",UnderBracket:"⎵",UnderParenthesis:"⏝",Union:"⋃",UnionPlus:"⊎",uogon:"ų",Uogon:"Ų",uopf:"𝕦",Uopf:"𝕌",uparrow:"↑",Uparrow:"⇑",UpArrow:"↑",UpArrowBar:"⤒",UpArrowDownArrow:"⇅",updownarrow:"↕",Updownarrow:"⇕",UpDownArrow:"↕",UpEquilibrium:"⥮",upharpoonleft:"↿",upharpoonright:"↾",uplus:"⊎",UpperLeftArrow:"↖",UpperRightArrow:"↗",upsi:"υ",Upsi:"ϒ",upsih:"ϒ",upsilon:"υ",Upsilon:"Υ",UpTee:"⊥",UpTeeArrow:"↥",upuparrows:"⇈",urcorn:"⌝",urcorner:"⌝",urcrop:"⌎",uring:"ů",Uring:"Ů",urtri:"◹",uscr:"𝓊",Uscr:"𝒰",utdot:"⋰",utilde:"ũ",Utilde:"Ũ",utri:"▵",utrif:"▴",uuarr:"⇈",uuml:"ü",Uuml:"Ü",uwangle:"⦧",vangrt:"⦜",varepsilon:"ϵ",varkappa:"ϰ",varnothing:"∅",varphi:"ϕ",varpi:"ϖ",varpropto:"∝",varr:"↕",vArr:"⇕",varrho:"ϱ",varsigma:"ς",varsubsetneq:"⊊︀",varsubsetneqq:"⫋︀",varsupsetneq:"⊋︀",varsupsetneqq:"⫌︀",vartheta:"ϑ",vartriangleleft:"⊲",vartriangleright:"⊳",vBar:"⫨",Vbar:"⫫",vBarv:"⫩",vcy:"в",Vcy:"В",vdash:"⊢",vDash:"⊨",Vdash:"⊩",VDash:"⊫",Vdashl:"⫦",vee:"∨",Vee:"⋁",veebar:"⊻",veeeq:"≚",vellip:"⋮",verbar:"|",Verbar:"‖",vert:"|",Vert:"‖",VerticalBar:"∣",VerticalLine:"|",VerticalSeparator:"❘",VerticalTilde:"≀",VeryThinSpace:" 
",vfr:"𝔳",Vfr:"𝔙",vltri:"⊲",vnsub:"⊂⃒",vnsup:"⊃⃒",vopf:"𝕧",Vopf:"𝕍",vprop:"∝",vrtri:"⊳",vscr:"𝓋",Vscr:"𝒱",vsubne:"⊊︀",vsubnE:"⫋︀",vsupne:"⊋︀",vsupnE:"⫌︀",Vvdash:"⊪",vzigzag:"⦚",wcirc:"ŵ",Wcirc:"Ŵ",wedbar:"⩟",wedge:"∧",Wedge:"⋀",wedgeq:"≙",weierp:"℘",wfr:"𝔴",Wfr:"𝔚",wopf:"𝕨",Wopf:"𝕎",wp:"℘",wr:"≀",wreath:"≀",wscr:"𝓌",Wscr:"𝒲",xcap:"⋂",xcirc:"◯",xcup:"⋃",xdtri:"▽",xfr:"𝔵",Xfr:"𝔛",xharr:"⟷",xhArr:"⟺",xi:"ξ",Xi:"Ξ",xlarr:"⟵",xlArr:"⟸",xmap:"⟼",xnis:"⋻",xodot:"⨀",xopf:"𝕩",Xopf:"𝕏",xoplus:"⨁",xotime:"⨂",xrarr:"⟶",xrArr:"⟹",xscr:"𝓍",Xscr:"𝒳",xsqcup:"⨆",xuplus:"⨄",xutri:"△",xvee:"⋁",xwedge:"⋀",yacute:"ý",Yacute:"Ý",yacy:"я",YAcy:"Я",ycirc:"ŷ",Ycirc:"Ŷ",ycy:"ы",Ycy:"Ы",yen:"¥",yfr:"𝔶",Yfr:"𝔜",yicy:"ї",YIcy:"Ї",yopf:"𝕪",Yopf:"𝕐",yscr:"𝓎",Yscr:"𝒴",yucy:"ю",YUcy:"Ю",yuml:"ÿ",Yuml:"Ÿ",zacute:"ź",Zacute:"Ź",zcaron:"ž",Zcaron:"Ž",zcy:"з",Zcy:"З",zdot:"ż",Zdot:"Ż",zeetrf:"ℨ",ZeroWidthSpace:"",zeta:"ζ",Zeta:"Ζ",zfr:"𝔷",Zfr:"ℨ",zhcy:"ж",ZHcy:"Ж",zigrarr:"⇝",zopf:"𝕫",Zopf:"ℤ",zscr:"𝓏",Zscr:"𝒵",zwj:"",zwnj:""},v={aacute:"á",Aacute:"Á",acirc:"â",Acirc:"Â",acute:"´",aelig:"æ",AElig:"Æ",agrave:"à",Agrave:"À",amp:"&",AMP:"&",aring:"å",Aring:"Å",atilde:"ã",Atilde:"Ã",auml:"ä",Auml:"Ä",brvbar:"¦",ccedil:"ç",Ccedil:"Ç",cedil:"¸",cent:"¢",copy:"©",COPY:"©",curren:"¤",deg:"°",divide:"÷",eacute:"é",Eacute:"É",ecirc:"ê",Ecirc:"Ê",egrave:"è",Egrave:"È",eth:"ð",ETH:"Ð",euml:"ë",Euml:"Ë",frac12:"½",frac14:"¼",frac34:"¾",gt:">",GT:">",iacute:"í",Iacute:"Í",icirc:"î",Icirc:"Î",iexcl:"¡",igrave:"ì",Igrave:"Ì",iquest:"¿",iuml:"ï",Iuml:"Ï",laquo:"«",lt:"<",LT:"<",macr:"¯",micro:"µ",middot:"·",nbsp:" ",not:"¬",ntilde:"ñ",Ntilde:"Ñ",oacute:"ó",Oacute:"Ó",ocirc:"ô",Ocirc:"Ô",ograve:"ò",Ograve:"Ò",ordf:"ª",ordm:"º",oslash:"ø",Oslash:"Ø",otilde:"õ",Otilde:"Õ",ouml:"ö",Ouml:"Ö",para:"¶",plusmn:"±",pound:"£",quot:'"',QUOT:'"',raquo:"»",reg:"®",REG:"®",sect:"§",shy:"",sup1:"¹",sup2:"²",sup3:"³",szlig:"ß",thorn:"þ",THORN:"Þ",times:"×",uacute:"ú",Uacute:"Ú",ucirc:"û",Ucirc:"Û",ugrave:"ù",Ugrave:"Ù",uml:"¨",uuml:"ü",Uuml:"Ü",yacute:"ý",Yacute:"Ý",yen:"¥",yuml:"ÿ"},b={0:"�",128:"€",130:"‚",131:"ƒ",132:"„",133:"…",134:"†",135:"‡",136:"ˆ",137:"‰",138:"Š",139:"‹",140:"Œ",142:"Ž",145:"‘",146:"’",147:"“",148:"”",149:"•",150:"–",151:"—",152:"˜",153:"™",154:"š",155:"›",156:"œ",158:"ž",159:"Ÿ"},y=[1,2,3,4,5,6,7,8,11,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,64976,64977,64978,64979,64980,64981,64982,64983,64984,64985,64986,64987,64988,64989,64990,64991,64992,64993,64994,64995,64996,64997,64998,64999,65e3,65001,65002,65003,65004,65005,65006,65007,65534,65535,131070,131071,196606,196607,262142,262143,327678,327679,393214,393215,458750,458751,524286,524287,589822,589823,655358,655359,720894,720895,786430,786431,851966,851967,917502,917503,983038,983039,1048574,1048575,1114110,1114111],w=String.fromCharCode,A={}.hasOwnProperty,x=function(e,r){return A.call(e,r)},k=function(e,r){if(!e)return r;var t,n={};for(t in r)n[t]=x(e,t)?e[t]:r[t];return n},E=function(e,r){var t="";return e>=55296&&e<=57343||e>1114111?(r&&L("character reference outside the permissible Unicode range"),"�"):x(b,e)?(r&&L("disallowed character reference"),b[e]):(r&&function(e,r){for(var t=-1,n=e.length;++t<n;)if(e[t]==r)return!0;return!1}(y,e)&&L("disallowed character 
reference"),e>65535&&(t+=w((e-=65536)>>>10&1023|55296),e=56320|1023&e),t+=w(e))},q=function(e){return"&#x"+e.toString(16).toUpperCase()+";"},D=function(e){return"&#"+e+";"},L=function(e){throw Error("Parse error: "+e)},S=function(e,r){(r=k(r,S.options)).strict&&m.test(e)&&L("forbidden code point");var t=r.encodeEverything,n=r.useNamedReferences,a=r.allowUnsafeSymbols,i=r.decimal?D:q,d=function(e){return i(e.charCodeAt(0))};return t?(e=e.replace(s,(function(e){return n&&x(u,e)?"&"+u[e]+";":d(e)})),n&&(e=e.replace(/>\u20D2/g,">⃒").replace(/<\u20D2/g,"<⃒").replace(/fj/g,"fj")),n&&(e=e.replace(c,(function(e){return"&"+u[e]+";"})))):n?(a||(e=e.replace(p,(function(e){return"&"+u[e]+";"}))),e=(e=e.replace(/>\u20D2/g,">⃒").replace(/<\u20D2/g,"<⃒")).replace(c,(function(e){return"&"+u[e]+";"}))):a||(e=e.replace(p,d)),e.replace(o,(function(e){var r=e.charCodeAt(0),t=e.charCodeAt(1);return i(1024*(r-55296)+t-56320+65536)})).replace(l,d)};S.options={allowUnsafeSymbols:!1,encodeEverything:!1,strict:!1,useNamedReferences:!1,decimal:!1};var T=function(e,r){var t=(r=k(r,T.options)).strict;return t&&f.test(e)&&L("malformed character reference"),e.replace(g,(function(e,n,a,i,o,s,l,c,u){var p,d,f,m,g,b;return n?h[g=n]:a?(g=a,(b=i)&&r.isAttributeValue?(t&&"="==b&&L("`&` did not start a character reference"),e):(t&&L("named character reference was not terminated by a semicolon"),v[g]+(b||""))):o?(f=o,d=s,t&&!d&&L("character reference was not terminated by a semicolon"),p=parseInt(f,10),E(p,t)):l?(m=l,d=c,t&&!d&&L("character reference was not terminated by a semicolon"),p=parseInt(m,16),E(p,t)):(t&&L("named character reference was not terminated by a semicolon"),e)}))};T.options={isAttributeValue:!1,strict:!1};var C={version:"1.2.0",encode:S,decode:T,escape:function(e){return e.replace(p,(function(e){return d[e]}))},unescape:T};if(n&&!n.nodeType)if(a)a.exports=C;else for(var B in C)x(C,B)&&(n[B]=C[B]);else t.he=C}(vr)}));function yr(e,r,t){var n=t||{},a=n.number,i="$$v";n.trim&&(i="(typeof $$v === 'string'? 
$$v.trim(): $$v)"),a&&(i="_n("+i+")");var o=wr(r,i);e.model={value:"("+r+")",expression:JSON.stringify(r),callback:"function ($$v) {"+o+"}"}}function wr(e,r){var t=function(e){if(e=e.trim(),lr=e.length,e.indexOf("[")<0||e.lastIndexOf("]")<lr-1)return(pr=e.lastIndexOf("."))>-1?{exp:e.slice(0,pr),key:'"'+e.slice(pr+1)+'"'}:{exp:e,key:null};for(cr=e,pr=dr=fr=0;!xr();)kr(ur=Ar())?qr(ur):91===ur&&Er(ur);return{exp:e.slice(0,dr),key:e.slice(dr+1,fr)}}(e);return null===t.key?e+"="+r:"$set("+t.exp+", "+t.key+", "+r+")"}function Ar(){return cr.charCodeAt(++pr)}function xr(){return pr>=lr}function kr(e){return 34===e||39===e}function Er(e){var r=1;for(dr=pr;!xr();)if(kr(e=Ar()))qr(e);else if(91===e&&r++,93===e&&r--,0===r){fr=pr;break}}function qr(e){for(var r=e;!xr()&&(e=Ar())!==r;);}var Dr,Lr,Sr,Tr,Cr,Br,Rr,Fr,Nr,Or=/^@|^v-on:/,Ur=/^v-|^@|^:|^#/,Ir=/([\s\S]*?)\s+(?:in|of)\s+([\s\S]*)/,Vr=/,([^,\}\]]*)(?:,([^,\}\]]*))?$/,_r=/^\(|\)$/g,Mr=/^\[.*\]$/,Gr=/:(.*)$/,Pr=/^:|^\.|^v-bind:/,jr=/\.[^.\]]+(?=[^\]]*$)/g,zr=/^v-slot(:|$)|^#/,Hr=/[\r\n]/,$r=/\s+/g,Yr=/[\s"'<>\/=]/,Jr=g(br.decode),Zr="_empty_";function Kr(e,r,t){return{type:1,tag:e,attrsList:r,attrsMap:nt(r),rawAttrsMap:{},parent:t,children:[]}}function Qr(e,r){Dr=r.warn||Ze,Br=r.isPreTag||x,Rr=r.mustUseProp||x,Fr=r.getTagNamespace||x;var t=r.isReservedTag||x;Nr=function(e){return!!e.component||!t(e.tag)},Sr=Ke(r.modules,"transformNode"),Tr=Ke(r.modules,"preTransformNode"),Cr=Ke(r.modules,"postTransformNode"),Lr=r.delimiters;var n,a,i=[],o=!1!==r.preserveWhitespace,s=r.whitespace,l=!1,c=!1,u=!1;function p(e,r){u||(u=!0,Dr(e,r))}function d(e){if(f(e),l||e.processed||(e=Wr(e,r)),i.length||e===n||(n.if&&(e.elseif||e.else)?(m(e),et(n,{exp:e.elseif,block:e})):p("Component template should contain exactly one root element. 
If you are using v-if on multiple elements, use v-else-if to chain them instead.",{start:e.start})),a&&!e.forbidden)if(e.elseif||e.else)o=e,(s=function(e){for(var r=e.length;r--;){if(1===e[r].type)return e[r];" "!==e[r].text&&Dr('text "'+e[r].text.trim()+'" between v-if and v-else(-if) will be ignored.',e[r]),e.pop()}}(a.children))&&s.if?et(s,{exp:o.elseif,block:o}):Dr("v-"+(o.elseif?'else-if="'+o.elseif+'"':"else")+" used on element <"+o.tag+"> without corresponding v-if.",o.rawAttrsMap[o.elseif?"v-else-if":"v-else"]);else{if(e.slotScope){var t=e.slotTarget||'"default"';(a.scopedSlots||(a.scopedSlots={}))[t]=e}a.children.push(e),e.parent=a}var o,s;e.children=e.children.filter((function(e){return!e.slotScope})),f(e),e.pre&&(l=!1),Br(e.tag)&&(c=!1);for(var u=0;u<Cr.length;u++)Cr[u](e,r)}function f(e){if(!c)for(var r;(r=e.children[e.children.length-1])&&3===r.type&&" "===r.text;)e.children.pop()}function m(e){"slot"!==e.tag&&"template"!==e.tag||p("Cannot use <"+e.tag+"> as component root element because it may contain multiple nodes.",{start:e.start}),e.attrsMap.hasOwnProperty("v-for")&&p("Cannot use v-for on stateful component root element because it renders multiple elements.",e.rawAttrsMap["v-for"])}return H(e,{warn:Dr,expectHTML:r.expectHTML,isUnaryTag:r.isUnaryTag,canBeLeftOpenTag:r.canBeLeftOpenTag,shouldDecodeNewlines:r.shouldDecodeNewlines,shouldDecodeNewlinesForHref:r.shouldDecodeNewlinesForHref,shouldKeepComment:r.comments,outputSourceRange:r.outputSourceRange,start:function(e,t,o,s,u){var p=a&&a.ns||Fr(e);ee&&"svg"===p&&(t=function(e){for(var r=[],t=0;t<e.length;t++){var n=e[t];at.test(n.name)||(n.name=n.name.replace(it,""),r.push(n))}return r}(t));var f,g=Kr(e,t,a);p&&(g.ns=p),r.outputSourceRange&&(g.start=s,g.end=u,g.rawAttrsMap=g.attrsList.reduce((function(e,r){return e[r.name]=r,e}),{})),t.forEach((function(e){Yr.test(e.name)&&Dr("Invalid dynamic argument expression: attribute names cannot contain spaces, quotes, <, >, / or =.",{start:e.start+e.name.indexOf("["),end:e.start+e.name.length})})),"style"!==(f=g).tag&&("script"!==f.tag||f.attrsMap.type&&"text/javascript"!==f.attrsMap.type)||ae()||(g.forbidden=!0,Dr("Templates should only be responsible for mapping the state to the UI. Avoid placing tags with side-effects in your templates, such as <"+e+">, as they will not be parsed.",{start:g.start}));for(var h=0;h<Tr.length;h++)g=Tr[h](g,r)||g;l||(function(e){null!=ir(e,"v-pre")&&(e.pre=!0)}(g),g.pre&&(l=!0)),Br(g.tag)&&(c=!0),l?function(e){var r=e.attrsList,t=r.length;if(t)for(var n=e.attrs=new Array(t),a=0;a<t;a++)n[a]={name:r[a].name,value:JSON.stringify(r[a].value)},null!=r[a].start&&(n[a].start=r[a].start,n[a].end=r[a].end);else e.pre||(e.plain=!0)}(g):g.processed||(Xr(g),function(e){var r=ir(e,"v-if");if(r)e.if=r,et(e,{exp:r,block:e});else{null!=ir(e,"v-else")&&(e.else=!0);var t=ir(e,"v-else-if");t&&(e.elseif=t)}}(g),function(e){null!=ir(e,"v-once")&&(e.once=!0)}(g)),n||m(n=g),o?d(g):(a=g,i.push(g))},end:function(e,t,n){var o=i[i.length-1];i.length-=1,a=i[i.length-1],r.outputSourceRange&&(o.end=n),d(o)},chars:function(t,n,i){if(a){if(!ee||"textarea"!==a.tag||a.attrsMap.placeholder!==t){var u,d,f,m=a.children;(t=c||t.trim()?"script"===(u=a).tag||"style"===u.tag?t:Jr(t):m.length?s?"condense"===s&&Hr.test(t)?"":" ":o?" 
":"":"")&&(c||"condense"!==s||(t=t.replace($r," ")),!l&&" "!==t&&(d=Je(t,Lr))?f={type:2,expression:d.expression,tokens:d.tokens,text:t}:" "===t&&m.length&&" "===m[m.length-1].text||(f={type:3,text:t}),f&&(r.outputSourceRange&&(f.start=n,f.end=i),m.push(f)))}}else t===e?p("Component template requires a root element, rather than just text.",{start:n}):(t=t.trim())&&p('text "'+t+'" outside root element will be ignored.',{start:n})},comment:function(e,t,n){if(a){var i={type:3,text:e,isComment:!0};r.outputSourceRange&&(i.start=t,i.end=n),a.children.push(i)}}}),n}function Wr(e,r){var t;!function(e){var r=ar(e,"key");if(r){if("template"===e.tag&&Dr("<template> cannot be keyed. Place the key on real elements instead.",nr(e,"key")),e.for){var t=e.iterator2||e.iterator1,n=e.parent;t&&t===r&&n&&"transition-group"===n.tag&&Dr("Do not use v-for index as key on <transition-group> children, this is the same as not using keys.",nr(e,"key"),!0)}e.key=r}}(e),e.plain=!e.key&&!e.scopedSlots&&!e.attrsList.length,function(e){var r=ar(e,"ref");r&&(e.ref=r,e.refInFor=function(e){for(var r=e;r;){if(void 0!==r.for)return!0;r=r.parent}return!1}(e))}(e),function(e){var r;"template"===e.tag?((r=ir(e,"scope"))&&Dr('the "scope" attribute for scoped slots have been deprecated and replaced by "slot-scope" since 2.5. The new "slot-scope" attribute can also be used on plain elements in addition to <template> to denote scoped slots.',e.rawAttrsMap.scope,!0),e.slotScope=r||ir(e,"slot-scope")):(r=ir(e,"slot-scope"))&&(e.attrsMap["v-for"]&&Dr("Ambiguous combined usage of slot-scope and v-for on <"+e.tag+"> (v-for takes higher priority). Use a wrapper <template> for the scoped slot to make it clearer.",e.rawAttrsMap["slot-scope"],!0),e.slotScope=r);var t=ar(e,"slot");if(t&&(e.slotTarget='""'===t?'"default"':t,e.slotTargetDynamic=!(!e.attrsMap[":slot"]&&!e.attrsMap["v-bind:slot"]),"template"===e.tag||e.slotScope||We(e,"slot",t,nr(e,"slot"))),"template"===e.tag){var n=or(e,zr);if(n){(e.slotTarget||e.slotScope)&&Dr("Unexpected mixed usage of different slot syntaxes.",e),e.parent&&!Nr(e.parent)&&Dr("<template v-slot> can only appear at the root level inside the receiving component",e);var a=rt(n),i=a.name,o=a.dynamic;e.slotTarget=i,e.slotTargetDynamic=o,e.slotScope=n.value||Zr}}else{var s=or(e,zr);if(s){Nr(e)||Dr("v-slot can only be used on components or <template>.",s),(e.slotScope||e.slotTarget)&&Dr("Unexpected mixed usage of different slot syntaxes.",e),e.scopedSlots&&Dr("To avoid scope ambiguity, the default slot should also use <template> syntax when there are other named slots.",s);var l=e.scopedSlots||(e.scopedSlots={}),c=rt(s),u=c.name,p=c.dynamic,d=l[u]=Kr("template",[],e);d.slotTarget=u,d.slotTargetDynamic=p,d.children=e.children.filter((function(e){if(!e.slotScope)return e.parent=d,!0})),d.slotScope=s.value||Zr,e.children=[],e.plain=!1}}}(e),"slot"===(t=e).tag&&(t.slotName=ar(t,"name"),t.key&&Dr("`key` does not work on <slot> because slots are abstract outlets and can possibly expand into multiple elements. 
Use the key on a wrapping element instead.",nr(t,"key"))),function(e){var r;(r=ar(e,"is"))&&(e.component=r),null!=ir(e,"inline-template")&&(e.inlineTemplate=!0)}(e);for(var n=0;n<Sr.length;n++)e=Sr[n](e,r)||e;return function(e){var r,t,n,a,i,o,s,l,c=e.attrsList;for(r=0,t=c.length;r<t;r++)if(n=a=c[r].name,i=c[r].value,Ur.test(n))if(e.hasBindings=!0,(o=tt(n.replace(Ur,"")))&&(n=n.replace(jr,"")),Pr.test(n))n=n.replace(Pr,""),i=je(i),(l=Mr.test(n))&&(n=n.slice(1,-1)),0===i.trim().length&&Dr('The value for a v-bind expression cannot be empty. Found in "v-bind:'+n+'"'),o&&(o.prop&&!l&&"innerHtml"===(n=v(n))&&(n="innerHTML"),o.camel&&!l&&(n=v(n)),o.sync&&(s=wr(i,"$event"),l?tr(e,'"update:"+('+n+")",s,null,!1,Dr,c[r],!0):(tr(e,"update:"+v(n),s,null,!1,Dr,c[r]),y(n)!==v(n)&&tr(e,"update:"+y(n),s,null,!1,Dr,c[r])))),o&&o.prop||!e.component&&Rr(e.tag,e.attrsMap.type,n)?Qe(e,n,i,c[r],l):We(e,n,i,c[r],l);else if(Or.test(n))n=n.replace(Or,""),(l=Mr.test(n))&&(n=n.slice(1,-1)),tr(e,n,i,o,!1,Dr,c[r],l);else{var u=(n=n.replace(Ur,"")).match(Gr),p=u&&u[1];l=!1,p&&(n=n.slice(0,-(p.length+1)),Mr.test(p)&&(p=p.slice(1,-1),l=!0)),er(e,n,a,i,p,l,o,c[r]),"model"===n&&ot(e,i)}else Je(i,Lr)&&Dr(n+'="'+i+'": Interpolation inside attributes has been removed. Use v-bind or the colon shorthand instead. For example, instead of <div id="{{ val }}">, use <div :id="val">.',c[r]),We(e,n,JSON.stringify(i),c[r]),!e.component&&"muted"===n&&Rr(e.tag,e.attrsMap.type,n)&&Qe(e,n,"true",c[r])}(e),e}function Xr(e){var r;if(r=ir(e,"v-for")){var t=function(e){var r=e.match(Ir);if(r){var t={};t.for=r[2].trim();var n=r[1].trim().replace(_r,""),a=n.match(Vr);return a?(t.alias=n.replace(Vr,"").trim(),t.iterator1=a[1].trim(),a[2]&&(t.iterator2=a[2].trim())):t.alias=n,t}}(r);t?w(e,t):Dr("Invalid v-for expression: "+r,e.rawAttrsMap["v-for"])}}function et(e,r){e.ifConditions||(e.ifConditions=[]),e.ifConditions.push(r)}function rt(e){var r=e.name.replace(zr,"");return r||("#"!==e.name[0]?r="default":Dr("v-slot shorthand syntax requires a slot name.",e)),Mr.test(r)?{name:r.slice(1,-1),dynamic:!0}:{name:'"'+r+'"',dynamic:!1}}function tt(e){var r=e.match(jr);if(r){var t={};return r.forEach((function(e){t[e.slice(1)]=!0})),t}}function nt(e){for(var r={},t=0,n=e.length;t<n;t++)!r[e[t].name]||ee||re||Dr("duplicate attribute: "+e[t].name,e[t]),r[e[t].name]=e[t].value;return r}var at=/^xmlns:NS\d+/,it=/^NS\d+:/;function ot(e,r){for(var t=e;t;)t.for&&t.alias===r&&Dr("<"+e.tag+' v-model="'+r+'">: You are binding v-model directly to a v-for iteration alias. This will not be able to modify the v-for source array because writing to the alias is like modifying a function local variable. 
Consider using an array of objects and use v-model on an object property instead.',e.rawAttrsMap["v-model"]),t=t.parent}function st(e){return Kr(e.tag,e.attrsList.slice(),e.parent)}var lt,ct,ut,pt=[mr,hr,{preTransformNode:function(e,r){if("input"===e.tag){var t,n=e.attrsMap;if(!n["v-model"])return;if((n[":type"]||n["v-bind:type"])&&(t=ar(e,"type")),n.type||t||!n["v-bind"]||(t="("+n["v-bind"]+").type"),t){var a=ir(e,"v-if",!0),i=a?"&&("+a+")":"",o=null!=ir(e,"v-else",!0),s=ir(e,"v-else-if",!0),l=st(e);Xr(l),Xe(l,"type","checkbox"),Wr(l,r),l.processed=!0,l.if="("+t+")==='checkbox'"+i,et(l,{exp:l.if,block:l});var c=st(e);ir(c,"v-for",!0),Xe(c,"type","radio"),Wr(c,r),et(l,{exp:"("+t+")==='radio'"+i,block:c});var u=st(e);return ir(u,"v-for",!0),Xe(u,":type",t),Wr(u,r),et(l,{exp:a,block:u}),o?l.else=!0:s&&(l.elseif=s),l}}}}],dt="__r",ft={expectHTML:!0,modules:pt,directives:{model:function(e,r,t){lt=t;var n=r.value,a=r.modifiers,i=e.tag,o=e.attrsMap.type;if("input"===i&&"file"===o&<("<"+e.tag+' v-model="'+n+'" type="file">:\nFile inputs are read only. Use a v-on:change listener instead.',e.rawAttrsMap["v-model"]),e.component)return yr(e,n,a),!1;if("select"===i)!function(e,r,t){var n='var $$selectedVal = Array.prototype.filter.call($event.target.options,function(o){return o.selected}).map(function(o){var val = "_value" in o ? o._value : o.value;return '+(t&&t.number?"_n(val)":"val")+"});";n=n+" "+wr(r,"$event.target.multiple ? $$selectedVal : $$selectedVal[0]"),tr(e,"change",n,null,!0)}(e,n,a);else if("input"===i&&"checkbox"===o)!function(e,r,t){var n=t&&t.number,a=ar(e,"value")||"null",i=ar(e,"true-value")||"true",o=ar(e,"false-value")||"false";Qe(e,"checked","Array.isArray("+r+")?_i("+r+","+a+")>-1"+("true"===i?":("+r+")":":_q("+r+","+i+")")),tr(e,"change","var $$a="+r+",$$el=$event.target,$$c=$$el.checked?("+i+"):("+o+");if(Array.isArray($$a)){var $$v="+(n?"_n("+a+")":a)+",$$i=_i($$a,$$v);if($$el.checked){$$i<0&&("+wr(r,"$$a.concat([$$v])")+")}else{$$i>-1&&("+wr(r,"$$a.slice(0,$$i).concat($$a.slice($$i+1))")+")}}else{"+wr(r,"$$c")+"}",null,!0)}(e,n,a);else if("input"===i&&"radio"===o)!function(e,r,t){var n=t&&t.number,a=ar(e,"value")||"null";Qe(e,"checked","_q("+r+","+(a=n?"_n("+a+")":a)+")"),tr(e,"change",wr(r,a),null,!0)}(e,n,a);else{if("input"!==i&&"textarea"!==i)return yr(e,n,a),!1;!function(e,r,t){var n=e.attrsMap.type,a=e.attrsMap["v-bind:value"]||e.attrsMap[":value"],i=e.attrsMap["v-bind:type"]||e.attrsMap[":type"];if(a&&!i){var o=e.attrsMap["v-bind:value"]?"v-bind:value":":value";lt(o+'="'+a+'" conflicts with v-model on the same element because the latter already expands to a value binding internally',e.rawAttrsMap[o])}var s=t||{},l=s.lazy,c=s.number,u=s.trim,p=!l&&"range"!==n,d=l?"change":"range"===n?dt:"input",f="$event.target.value";u&&(f="$event.target.value.trim()"),c&&(f="_n("+f+")");var m=wr(r,f);p&&(m="if($event.target.composing)return;"+m),Qe(e,"value","("+r+")"),tr(e,d,m,null,!0),(u||c)&&tr(e,"blur","$forceUpdate()")}(e,n,a)}return!0},text:function(e,r){r.value&&Qe(e,"textContent","_s("+r.value+")",r)},html:function(e,r){r.value&&Qe(e,"innerHTML","_s("+r.value+")",r)}},isPreTag:function(e){return"pre"===e},isUnaryTag:k,mustUseProp:function(e,r,t){return"value"===t&&Ie(e)&&"button"!==r||"selected"===t&&"option"===e||"checked"===t&&"input"===e||"muted"===t&&"video"===e},canBeLeftOpenTag:E,isReservedTag:function(e){return Me(e)||Ge(e)},getTagNamespace:function(e){return Ge(e)?"svg":"math"===e?"math":void 0},staticKeys:function(e){return e.reduce((function(e,r){return 
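/*
 * Hedged annotation: the surrounding object appears to be the base options of
 * the Vue 2 template compiler (vue-template-compiler): platform modules (`pt`),
 * the built-in `model`/`text`/`html` directives, and DOM predicates such as
 * `isPreTag`, `mustUseProp`, and `getTagNamespace`. The `staticKeys` entry this
 * comment interrupts simply collects each module's declared static keys into a
 * comma-separated string. Illustrative only, with two hypothetical modules (not
 * part of the original bundle):
 *
 *   var modules = [{ staticKeys: ['staticClass'] }, { staticKeys: ['staticStyle'] }];
 *   modules.reduce(function (keys, m) { return keys.concat(m.staticKeys || []); }, [])
 *          .join(','); // -> "staticClass,staticStyle"
 */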
e.concat(r.staticKeys||[])}),[]).join(",")}(pt)},mt=g((function(e){return p("type,tag,attrsList,attrsMap,plain,parent,children,attrs,start,end,rawAttrsMap"+(e?","+e:""))}));function gt(e,r){e&&(ct=mt(r.staticKeys||""),ut=r.isReservedTag||x,function e(r){if(r.static=function(e){return 2!==e.type&&(3===e.type||!(!e.pre&&(e.hasBindings||e.if||e.for||d(e.tag)||!ut(e.tag)||function(e){for(;e.parent;){if("template"!==(e=e.parent).tag)return!1;if(e.for)return!0}return!1}(e)||!Object.keys(e).every(ct))))}(r),1===r.type){if(!ut(r.tag)&&"slot"!==r.tag&&null==r.attrsMap["inline-template"])return;for(var t=0,n=r.children.length;t<n;t++){var a=r.children[t];e(a),a.static||(r.static=!1)}if(r.ifConditions)for(var i=1,o=r.ifConditions.length;i<o;i++){var s=r.ifConditions[i].block;e(s),s.static||(r.static=!1)}}}(e),function e(r,t){if(1===r.type){if((r.static||r.once)&&(r.staticInFor=t),r.static&&r.children.length&&(1!==r.children.length||3!==r.children[0].type))return void(r.staticRoot=!0);if(r.staticRoot=!1,r.children)for(var n=0,a=r.children.length;n<a;n++)e(r.children[n],t||!!r.for);if(r.ifConditions)for(var i=1,o=r.ifConditions.length;i<o;i++)e(r.ifConditions[i].block,t)}}(e,!1))}var ht=/^([\w$_]+|\([^)]*?\))\s*=>|^function(?:\s+[\w$]+)?\s*\(/,vt=/\([^)]*?\);*$/,bt=/^[A-Za-z_$][\w$]*(?:\.[A-Za-z_$][\w$]*|\['[^']*?']|\["[^"]*?"]|\[\d+]|\[[A-Za-z_$][\w$]*])*$/,yt={esc:27,tab:9,enter:13,space:32,up:38,left:37,right:39,down:40,delete:[8,46]},wt={esc:["Esc","Escape"],tab:"Tab",enter:"Enter",space:[" ","Spacebar"],up:["Up","ArrowUp"],left:["Left","ArrowLeft"],right:["Right","ArrowRight"],down:["Down","ArrowDown"],delete:["Backspace","Delete","Del"]},At=function(e){return"if("+e+")return null;"},xt={stop:"$event.stopPropagation();",prevent:"$event.preventDefault();",self:At("$event.target !== $event.currentTarget"),ctrl:At("!$event.ctrlKey"),shift:At("!$event.shiftKey"),alt:At("!$event.altKey"),meta:At("!$event.metaKey"),left:At("'button' in $event && $event.button !== 0"),middle:At("'button' in $event && $event.button !== 1"),right:At("'button' in $event && $event.button !== 2")};function kt(e,r){var t=r?"nativeOn:":"on:",n="",a="";for(var i in e){var o=Et(e[i]);e[i]&&e[i].dynamic?a+=i+","+o+",":n+='"'+i+'":'+o+","}return n="{"+n.slice(0,-1)+"}",a?t+"_d("+n+",["+a.slice(0,-1)+"])":t+n}function Et(e){if(!e)return"function(){}";if(Array.isArray(e))return"["+e.map((function(e){return Et(e)})).join(",")+"]";var r=bt.test(e.value),t=ht.test(e.value),n=bt.test(e.value.replace(vt,""));if(e.modifiers){var a="",i="",o=[];for(var s in e.modifiers)if(xt[s])i+=xt[s],yt[s]&&o.push(s);else if("exact"===s){var l=e.modifiers;i+=At(["ctrl","shift","alt","meta"].filter((function(e){return!l[e]})).map((function(e){return"$event."+e+"Key"})).join("||"))}else o.push(s);return o.length&&(a+=function(e){return"if(!$event.type.indexOf('key')&&"+e.map(qt).join("&&")+")return null;"}(o)),i&&(a+=i),"function($event){"+a+(r?"return "+e.value+"($event)":t?"return ("+e.value+")($event)":n?"return "+e.value:e.value)+"}"}return r||t?e.value:"function($event){"+(n?"return "+e.value:e.value)+"}"}function qt(e){var r=parseInt(e,10);if(r)return"$event.keyCode!=="+r;var t=yt[e],n=wt[e];return"_k($event.keyCode,"+JSON.stringify(e)+","+JSON.stringify(t)+",$event.key,"+JSON.stringify(n)+")"}var Dt={on:function(e,r){r.modifiers&&ue("v-on without argument does not support 
modifiers."),e.wrapListeners=function(e){return"_g("+e+","+r.value+")"}},bind:function(e,r){e.wrapData=function(t){return"_b("+t+",'"+e.tag+"',"+r.value+","+(r.modifiers&&r.modifiers.prop?"true":"false")+(r.modifiers&&r.modifiers.sync?",true":"")+")"}},cloak:A},Lt=function(e){this.options=e,this.warn=e.warn||Ze,this.transforms=Ke(e.modules,"transformCode"),this.dataGenFns=Ke(e.modules,"genData"),this.directives=w(w({},Dt),e.directives);var r=e.isReservedTag||x;this.maybeComponent=function(e){return!!e.component||!r(e.tag)},this.onceId=0,this.staticRenderFns=[],this.pre=!1};function St(e,r){var t=new Lt(r);return{render:"with(this){return "+(e?Tt(e,t):'_c("div")')+"}",staticRenderFns:t.staticRenderFns}}function Tt(e,r){if(e.parent&&(e.pre=e.pre||e.parent.pre),e.staticRoot&&!e.staticProcessed)return Ct(e,r);if(e.once&&!e.onceProcessed)return Bt(e,r);if(e.for&&!e.forProcessed)return Ft(e,r);if(e.if&&!e.ifProcessed)return Rt(e,r);if("template"!==e.tag||e.slotTarget||r.pre){if("slot"===e.tag)return function(e,r){var t=e.slotName||'"default"',n=It(e,r),a="_t("+t+(n?","+n:""),i=e.attrs||e.dynamicAttrs?Gt((e.attrs||[]).concat(e.dynamicAttrs||[]).map((function(e){return{name:v(e.name),value:e.value,dynamic:e.dynamic}}))):null,o=e.attrsMap["v-bind"];return!i&&!o||n||(a+=",null"),i&&(a+=","+i),o&&(a+=(i?"":",null")+","+o),a+")"}(e,r);var t;if(e.component)t=function(e,r,t){var n=r.inlineTemplate?null:It(r,t,!0);return"_c("+e+","+Nt(r,t)+(n?","+n:"")+")"}(e.component,e,r);else{var n;(!e.plain||e.pre&&r.maybeComponent(e))&&(n=Nt(e,r));var a=e.inlineTemplate?null:It(e,r,!0);t="_c('"+e.tag+"'"+(n?","+n:"")+(a?","+a:"")+")"}for(var i=0;i<r.transforms.length;i++)t=r.transforms[i](e,t);return t}return It(e,r)||"void 0"}function Ct(e,r){e.staticProcessed=!0;var t=r.pre;return e.pre&&(r.pre=e.pre),r.staticRenderFns.push("with(this){return "+Tt(e,r)+"}"),r.pre=t,"_m("+(r.staticRenderFns.length-1)+(e.staticInFor?",true":"")+")"}function Bt(e,r){if(e.onceProcessed=!0,e.if&&!e.ifProcessed)return Rt(e,r);if(e.staticInFor){for(var t="",n=e.parent;n;){if(n.for){t=n.key;break}n=n.parent}return t?"_o("+Tt(e,r)+","+r.onceId+++","+t+")":(r.warn("v-once can only be used inside v-for that is keyed. ",e.rawAttrsMap["v-once"]),Tt(e,r))}return Ct(e,r)}function Rt(e,r,t,n){return e.ifProcessed=!0,function e(r,t,n,a){if(!r.length)return a||"_e()";var i=r.shift();return i.exp?"("+i.exp+")?"+o(i.block)+":"+e(r,t,n,a):""+o(i.block);function o(e){return n?n(e,t):e.once?Bt(e,t):Tt(e,t)}}(e.ifConditions.slice(),r,t,n)}function Ft(e,r,t,n){var a=e.for,i=e.alias,o=e.iterator1?","+e.iterator1:"",s=e.iterator2?","+e.iterator2:"";return r.maybeComponent(e)&&"slot"!==e.tag&&"template"!==e.tag&&!e.key&&r.warn("<"+e.tag+' v-for="'+i+" in "+a+'">: component lists rendered with v-for should have explicit keys. 
See https://vuejs.org/guide/list.html#key for more info.',e.rawAttrsMap["v-for"],!0),e.forProcessed=!0,(n||"_l")+"(("+a+"),function("+i+o+s+"){return "+(t||Tt)(e,r)+"})"}function Nt(e,r){var t="{",n=function(e,r){var t=e.directives;if(t){var n,a,i,o,s="directives:[",l=!1;for(n=0,a=t.length;n<a;n++){i=t[n],o=!0;var c=r.directives[i.name];c&&(o=!!c(e,i,r.warn)),o&&(l=!0,s+='{name:"'+i.name+'",rawName:"'+i.rawName+'"'+(i.value?",value:("+i.value+"),expression:"+JSON.stringify(i.value):"")+(i.arg?",arg:"+(i.isDynamicArg?i.arg:'"'+i.arg+'"'):"")+(i.modifiers?",modifiers:"+JSON.stringify(i.modifiers):"")+"},")}return l?s.slice(0,-1)+"]":void 0}}(e,r);n&&(t+=n+","),e.key&&(t+="key:"+e.key+","),e.ref&&(t+="ref:"+e.ref+","),e.refInFor&&(t+="refInFor:true,"),e.pre&&(t+="pre:true,"),e.component&&(t+='tag:"'+e.tag+'",');for(var a=0;a<r.dataGenFns.length;a++)t+=r.dataGenFns[a](e);if(e.attrs&&(t+="attrs:"+Gt(e.attrs)+","),e.props&&(t+="domProps:"+Gt(e.props)+","),e.events&&(t+=kt(e.events,!1)+","),e.nativeEvents&&(t+=kt(e.nativeEvents,!0)+","),e.slotTarget&&!e.slotScope&&(t+="slot:"+e.slotTarget+","),e.scopedSlots&&(t+=function(e,r,t){var n=e.for||Object.keys(r).some((function(e){var t=r[e];return t.slotTargetDynamic||t.if||t.for||Ot(t)})),a=!!e.if;if(!n)for(var i=e.parent;i;){if(i.slotScope&&i.slotScope!==Zr||i.for){n=!0;break}i.if&&(a=!0),i=i.parent}var o=Object.keys(r).map((function(e){return Ut(r[e],t)})).join(",");return"scopedSlots:_u(["+o+"]"+(n?",null,true":"")+(!n&&a?",null,false,"+function(e){for(var r=5381,t=e.length;t;)r=33*r^e.charCodeAt(--t);return r>>>0}(o):"")+")"}(e,e.scopedSlots,r)+","),e.model&&(t+="model:{value:"+e.model.value+",callback:"+e.model.callback+",expression:"+e.model.expression+"},"),e.inlineTemplate){var i=function(e,r){var t=e.children[0];if(1===e.children.length&&1===t.type||r.warn("Inline-template components must have exactly one child element.",{start:e.start}),t&&1===t.type){var n=St(t,r.options);return"inlineTemplate:{render:function(){"+n.render+"},staticRenderFns:["+n.staticRenderFns.map((function(e){return"function(){"+e+"}"})).join(",")+"]}"}}(e,r);i&&(t+=i+",")}return t=t.replace(/,$/,"")+"}",e.dynamicAttrs&&(t="_b("+t+',"'+e.tag+'",'+Gt(e.dynamicAttrs)+")"),e.wrapData&&(t=e.wrapData(t)),e.wrapListeners&&(t=e.wrapListeners(t)),t}function Ot(e){return 1===e.type&&("slot"===e.tag||e.children.some(Ot))}function Ut(e,r){var t=e.attrsMap["slot-scope"];if(e.if&&!e.ifProcessed&&!t)return Rt(e,r,Ut,"null");if(e.for&&!e.forProcessed)return Ft(e,r,Ut);var n=e.slotScope===Zr?"":String(e.slotScope),a="function("+n+"){return "+("template"===e.tag?e.if&&t?"("+e.if+")?"+(It(e,r)||"undefined")+":undefined":It(e,r)||"undefined":Tt(e,r))+"}",i=n?"":",proxy:true";return"{key:"+(e.slotTarget||'"default"')+",fn:"+a+i+"}"}function It(e,r,t,n,a){var i=e.children;if(i.length){var o=i[0];if(1===i.length&&o.for&&"template"!==o.tag&&"slot"!==o.tag){var s=t?r.maybeComponent(o)?",1":",0":"";return""+(n||Tt)(o,r)+s}var l=t?function(e,r){for(var t=0,n=0;n<e.length;n++){var a=e[n];if(1===a.type){if(Vt(a)||a.ifConditions&&a.ifConditions.some((function(e){return Vt(e.block)}))){t=2;break}(r(a)||a.ifConditions&&a.ifConditions.some((function(e){return r(e.block)})))&&(t=1)}}return t}(i,r.maybeComponent):0,c=a||_t;return"["+i.map((function(e){return c(e,r)})).join(",")+"]"+(l?","+l:"")}}function Vt(e){return void 0!==e.for||"template"===e.tag||"slot"===e.tag}function _t(e,r){return 1===e.type?Tt(e,r):3===e.type&&e.isComment?function(e){return"_e("+JSON.stringify(e.text)+")"}(e):Mt(e)}function 
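/*
 * Hedged annotation: the functions around this point are the code-generation
 * stage of the template compiler. They do not build a render function
 * directly; they build the *source string* of one, which is later wrapped in
 * `new Function(...)`. `Mt` (defined just below) emits a text vnode via `_v`,
 * `Gt` serializes attribute lists, and `Pt` escapes U+2028/U+2029 so the
 * generated source stays parseable. Sketch of the expected output shape, an
 * assumption based on Vue 2 conventions rather than text from this bundle:
 *
 *   // template:         <div :id="val">{{ msg }}</div>
 *   // generated source: with(this){return _c('div',{attrs:{"id":val}},[_v(_s(msg))])}
 */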
Mt(e){return"_v("+(2===e.type?e.expression:Pt(JSON.stringify(e.text)))+")"}function Gt(e){for(var r="",t="",n=0;n<e.length;n++){var a=e[n],i=Pt(a.value);a.dynamic?t+=a.name+","+i+",":r+='"'+a.name+'":'+i+","}return r="{"+r.slice(0,-1)+"}",t?"_d("+r+",["+t.slice(0,-1)+"])":r}function Pt(e){return e.replace(/\u2028/g,"\\u2028").replace(/\u2029/g,"\\u2029")}var jt=new RegExp("\\b"+"do,if,for,let,new,try,var,case,else,with,await,break,catch,class,const,super,throw,while,yield,delete,export,import,return,switch,default,extends,finally,continue,debugger,function,arguments".split(",").join("\\b|\\b")+"\\b"),zt=new RegExp("\\b"+"delete,typeof,void".split(",").join("\\s*\\([^\\)]*\\)|\\b")+"\\s*\\([^\\)]*\\)"),Ht=/'(?:[^'\\]|\\.)*'|"(?:[^"\\]|\\.)*"|`(?:[^`\\]|\\.)*\$\{|\}(?:[^`\\]|\\.)*`|`(?:[^`\\]|\\.)*`/g;function $t(e,r){e&&function e(r,t){if(1===r.type){for(var n in r.attrsMap)if(Ur.test(n)){var a=r.attrsMap[n];if(a){var i=r.rawAttrsMap[n];"v-for"===n?Jt(r,'v-for="'+a+'"',t,i):"v-slot"===n||"#"===n[0]?Qt(a,n+'="'+a+'"',t,i):Or.test(n)?Yt(a,n+'="'+a+'"',t,i):Kt(a,n+'="'+a+'"',t,i)}}if(r.children)for(var o=0;o<r.children.length;o++)e(r.children[o],t)}else 2===r.type&&Kt(r.expression,r.text,t,r)}(e,r)}function Yt(e,r,t,n){var a=e.replace(Ht,""),i=a.match(zt);i&&"$"!==a.charAt(i.index-1)&&t('avoid using JavaScript unary operator as property name: "'+i[0]+'" in expression '+r.trim(),n),Kt(e,r,t,n)}function Jt(e,r,t,n){Kt(e.for||"",r,t,n),Zt(e.alias,"v-for alias",r,t,n),Zt(e.iterator1,"v-for iterator",r,t,n),Zt(e.iterator2,"v-for iterator",r,t,n)}function Zt(e,r,t,n,a){if("string"==typeof e)try{new Function("var "+e+"=_")}catch(i){n("invalid "+r+' "'+e+'" in expression: '+t.trim(),a)}}function Kt(e,r,t,n){try{new Function("return "+e)}catch(i){var a=e.replace(Ht,"").match(jt);t(a?'avoid using JavaScript keyword as property name: "'+a[0]+'"\n Raw expression: '+r.trim():"invalid expression: "+i.message+" in\n\n "+e+"\n\n Raw expression: "+r.trim()+"\n",n)}}function Qt(e,r,t,n){try{new Function(e,"")}catch(a){t("invalid function parameter expression: "+a.message+" in\n\n "+e+"\n\n Raw expression: "+r.trim()+"\n",n)}}var Wt=2;function Xt(e,r,t){void 0===r&&(r=0),void 0===t&&(t=e.length);for(var n=e.split(/\r?\n/),a=0,i=[],o=0;o<n.length;o++)if((a+=n[o].length+1)>=r){for(var s=o-Wt;s<=o+Wt||t>a;s++)if(!(s<0||s>=n.length)){i.push(""+(s+1)+en(" ",3-String(s+1).length)+"| "+n[s]);var l=n[s].length;if(s===o){var c=r-(a-l)+1,u=t>a?l-c:t-r;i.push(" | "+en(" ",c)+en("^",u))}else if(s>o){if(t>a){var p=Math.min(t-a,l);i.push(" | "+en("^",p))}a+=l+1}}break}return i.join("\n")}function en(e,r){var t="";if(r>0)for(;1&r&&(t+=e),!((r>>>=1)<=0);)e+=e;return t}function rn(e,r){try{return new Function(e)}catch(t){return r.push({err:t,code:e}),A}}function tn(e){var r=Object.create(null);return function(t,n,a){var i=(n=w({},n)).warn||ue;delete n.warn;try{new Function("return 1")}catch(e){e.toString().match(/unsafe-eval|CSP/)&&i("It seems you are using the standalone build of Vue.js in an environment with Content Security Policy that prohibits unsafe-eval. The template compiler cannot work in this environment. 
Consider relaxing the policy to allow unsafe-eval or pre-compiling your templates into render functions.")}var o=n.delimiters?String(n.delimiters)+t:t;if(r[o])return r[o];var s=e(t,n);s.errors&&s.errors.length&&(n.outputSourceRange?s.errors.forEach((function(e){i("Error compiling template:\n\n"+e.msg+"\n\n"+Xt(t,e.start,e.end),a)})):i("Error compiling template:\n\n"+t+"\n\n"+s.errors.map((function(e){return"- "+e})).join("\n")+"\n",a)),s.tips&&s.tips.length&&(n.outputSourceRange?s.tips.forEach((function(e){return pe(e.msg,a)})):s.tips.forEach((function(e){return pe(e,a)})));var l={},c=[];return l.render=rn(s.render,c),l.staticRenderFns=s.staticRenderFns.map((function(e){return rn(e,c)})),s.errors&&s.errors.length||!c.length||i("Failed to generate render function:\n\n"+c.map((function(e){var r=e.err,t=e.code;return r.toString()+" in\n\n"+t+"\n"})).join("\n"),a),r[o]=l}}function nn(e){return function(r){function t(t,n){var a=Object.create(r),i=[],o=[],s=function(e,r,t){(t?o:i).push(e)};if(n){if(n.outputSourceRange){var l=t.match(/^\s*/)[0].length;s=function(e,r,t){var n={msg:e};r&&(null!=r.start&&(n.start=r.start+l),null!=r.end&&(n.end=r.end+l)),(t?o:i).push(n)}}for(var c in n.modules&&(a.modules=(r.modules||[]).concat(n.modules)),n.directives&&(a.directives=w(Object.create(r.directives||null),n.directives)),n)"modules"!==c&&"directives"!==c&&(a[c]=n[c])}a.warn=s;var u=e(t.trim(),a);return $t(u.ast,s),u.errors=i,u.tips=o,u}return{compile:t,compileToFunctions:tn(t)}}}var an=nn((function(e,r){var t=Qr(e.trim(),r);!1!==r.optimize&>(t,r);var n=St(t,r);return{ast:t,render:n.render,staticRenderFns:n.staticRenderFns}}))(ft),on=an.compile,sn=an.compileToFunctions,ln=p("accept,accept-charset,accesskey,action,align,alt,async,autocomplete,autofocus,autoplay,autosave,bgcolor,border,buffered,challenge,charset,checked,cite,class,code,codebase,color,cols,colspan,content,http-equiv,name,contenteditable,contextmenu,controls,coords,data,datetime,default,defer,dir,dirname,disabled,download,draggable,dropzone,enctype,method,for,form,formaction,headers,height,hidden,high,href,hreflang,http-equiv,icon,id,ismap,itemprop,keytype,kind,label,lang,language,list,loop,low,manifest,max,maxlength,media,method,GET,POST,min,multiple,email,file,muted,name,novalidate,open,optimum,pattern,ping,placeholder,poster,preload,radiogroup,readonly,rel,required,reversed,rows,rowspan,sandbox,scope,scoped,seamless,selected,shape,size,type,text,password,sizes,span,spellcheck,src,srcdoc,srclang,srcset,start,step,style,summary,tabindex,target,title,type,usemap,value,width,wrap"),cn=function(e){return ln(e)||0===e.indexOf("data-")||0===e.indexOf("aria-")},un={acceptCharset:"accept-charset",className:"class",htmlFor:"for",httpEquiv:"http-equiv"},pn={"<":"<",">":">",'"':""","&":"&"};function dn(e){return pn[e]||e}var fn=/^"(?:[^"\\]|\\.)*"$|^'(?:[^'\\]|\\.)*'$/;function mn(e,r){return fn.test(r)?(r=r.replace(/^'|'$/g,'"'),Ve(e)&&'"false"'!==r&&(r='"true"'),{type:yn,value:_e(e)?" "+e+'="'+e+'"':'""'===r?" 
"+e:" "+e+'="'+JSON.parse(r)+'"'}):{type:An,value:"_ssrAttr("+JSON.stringify(e)+","+r+")"}}var gn,hn={FALSE:0,FULL:1,SELF:2,CHILDREN:3,PARTIAL:4};function vn(e,r){e&&(gn=r.isReservedTag||x,function e(r,t){if(function(e){return 2!==e.type&&3!==e.type&&(d(e.tag)||!gn(e.tag)||!!e.component||function(e){return 1===e.type&&"select"===e.tag&&null!=e.directives&&e.directives.some((function(e){return"model"===e.name}))}(e))}(r))r.ssrOptimizability=hn.FALSE;else{var n=t||function(e){return 1===e.type&&e.directives&&e.directives.some((function(e){return!bn(e.name)}))}(r),a=function(e){e.ssrOptimizability!==hn.FULL&&(r.ssrOptimizability=n?hn.PARTIAL:hn.SELF)};if(n&&(r.ssrOptimizability=hn.CHILDREN),1===r.type){for(var i=0,o=r.children.length;i<o;i++){var s=r.children[i];e(s),a(s)}if(r.ifConditions)for(var l=1,c=r.ifConditions.length;l<c;l++){var u=r.ifConditions[l].block;e(u,t),a(u)}null==r.ssrOptimizability||!t&&(r.attrsMap["v-html"]||r.attrsMap["v-text"])?r.ssrOptimizability=hn.FULL:r.children=function(e){for(var r=e.children,t=[],n=[],a=function(){n.length&&t.push({type:1,parent:e,tag:"template",attrsList:[],attrsMap:{},rawAttrsMap:{},children:n,ssrOptimizability:hn.FULL}),n=[]},i=0;i<r.length;i++){var o=r[i];o.ssrOptimizability===hn.FULL?n.push(o):(a(),t.push(o))}return a(),t}(r)}else r.ssrOptimizability=hn.FULL}}(e,!0))}var bn=p("text,html,show,on,bind,model,pre,cloak,once"),yn=0,wn=1,An=2;function xn(e,r){if(e.for&&!e.forProcessed)return Ft(e,r,xn);if(e.if&&!e.ifProcessed)return Rt(e,r,xn);if("template"===e.tag&&!e.slotTarget)return e.ssrOptimizability===hn.FULL?Dn(e,r):En(e,r)||"void 0";switch(e.ssrOptimizability){case hn.FULL:return function(e,r){return"_ssrNode("+Ln(e,r)+")"}(e,r);case hn.SELF:return function(e,r){var t=En(e,r,!0);return"_ssrNode("+Bn(Tn(e,r))+',"</'+e.tag+'>"'+(t?","+t:"")+")"}(e,r);case hn.CHILDREN:return kn(e,r,!0);case hn.PARTIAL:return kn(e,r,!1);default:return Tt(e,r)}}function kn(e,r,t){var n=e.plain?void 0:Nt(e,r),a=t?"["+Dn(e,r)+"]":En(e,r,!0);return"_c('"+e.tag+"'"+(n?","+n:"")+(a?","+a:"")+")"}function En(e,r,t){return It(e,r,t,xn,qn)}function qn(e,r){return 1===e.type?xn(e,r):Mt(e)}function Dn(e,r){return e.children.length?"_ssrNode("+Bn(Cn(e,r))+")":""}function Ln(e,r){return"("+Bn(Sn(e,r))+")"}function Sn(e,r){if(e.for&&!e.forProcessed)return e.forProcessed=!0,[{type:An,value:Ft(e,r,Ln,"_ssrList")}];if(e.if&&!e.ifProcessed)return e.ifProcessed=!0,[{type:An,value:Rt(e,r,Ln,'"\x3c!----\x3e"')}];if("template"===e.tag)return Cn(e,r);var t=Tn(e,r),n=Cn(e,r),a=r.options.isUnaryTag,i=a&&a(e.tag)?[]:[{type:yn,value:"</"+e.tag+">"}];return t.concat(n,i)}function Tn(e,r){var t;!function(e,r){if(e.directives)for(var t=0;t<e.directives.length;t++){var n=e.directives[t];if("model"===n.name){r.directives.model(e,n,r.warn),"textarea"===e.tag&&e.props&&(e.props=e.props.filter((function(e){return"value"!==e.name})));break}}}(e,r);var n,a,i,o,s,l,c=[{type:yn,value:"<"+e.tag}];return e.attrs&&c.push.apply(c,e.attrs.map((function(e){return mn(e.name,e.value)}))),e.props&&c.push.apply(c,function(e,r){var t=[];return e.forEach((function(e){var n=e.name,a=e.value;n=un[n]||n.toLowerCase(),!cn(n)||r&&r.some((function(e){return e.name===n}))||t.push(mn(n,a))})),t}(e.props,e.attrs)),(t=e.attrsMap["v-bind"])&&c.push({type:An,value:"_ssrAttrs("+t+")"}),(t=e.attrsMap["v-bind.prop"])&&c.push({type:An,value:"_ssrDOMProps("+t+")"}),(e.staticClass||e.classBinding)&&c.push.apply(c,(n=e.staticClass,a=e.classBinding,n&&!a?[{type:yn,value:' 
class="'+JSON.parse(n)+'"'}]:[{type:An,value:"_ssrClass("+(n||"null")+","+(a||"null")+")"}])),(e.staticStyle||e.styleBinding||e.attrsMap["v-show"])&&c.push.apply(c,(i=e.attrsMap.style,o=e.staticStyle,s=e.styleBinding,l=e.attrsMap["v-show"],!i||s||l?[{type:An,value:"_ssrStyle("+(o||"null")+","+(s||"null")+", "+(l?"{ display: ("+l+") ? '' : 'none' }":"null")+")"}]:[{type:yn,value:" style="+JSON.stringify(i)}])),r.options.scopeId&&c.push({type:yn,value:" "+r.options.scopeId}),c.push({type:yn,value:">"}),c}function Cn(e,r){var t;return(t=e.attrsMap["v-html"])?[{type:An,value:"_s("+t+")"}]:(t=e.attrsMap["v-text"])?[{type:wn,value:"_s("+t+")"}]:"textarea"===e.tag&&(t=e.attrsMap["v-model"])?[{type:wn,value:"_s("+t+")"}]:e.children?function(e,r){for(var t=[],n=0;n<e.length;n++){var a=e[n];if(1===a.type)t.push.apply(t,Sn(a,r));else if(2===a.type)t.push({type:wn,value:a.expression});else if(3===a.type){var i=a.text.replace(/[<>"&]/g,dn);a.isComment&&(i="\x3c!--"+i+"--\x3e"),t.push({type:yn,value:i})}}return t}(e.children,r):[]}function Bn(e){for(var r=[],t="",n=function(){t&&(r.push(JSON.stringify(t)),t="")},a=0;a<e.length;a++){var i=e[a];i.type===yn?t+=i.value:i.type===wn?(n(),r.push("_ssrEscape("+i.value+")")):i.type===An&&(n(),r.push("("+i.value+")"))}return n(),r.join("+")}var Rn=nn((function(e,r){var t=Qr(e.trim(),r);vn(t,r);var n=function(e,r){var t=new Lt(r);return{render:"with(this){return "+(e?xn(e,t):'_c("div")')+"}",staticRenderFns:t.staticRenderFns}}(t,r);return{ast:t,render:n.render,staticRenderFns:n.staticRenderFns}}))(ft),Fn=Rn.compile,Nn=Rn.compileToFunctions;r.parseComponent=function(e,r){void 0===r&&(r={});var t={template:null,script:null,styles:[],customBlocks:[],errors:[]},n=0,a=null,i=function(e){t.errors.push(e)};return r.outputSourceRange&&(i=function(e,r){var n={msg:e};null!=r.start&&(n.start=r.start),null!=r.end&&(n.end=r.end),t.errors.push(n)}),H(e,{warn:i,start:function(e,r,i,o,s){0===n&&(a={type:e,content:"",start:s,attrs:r.reduce((function(e,r){var t=r.name,n=r.value;return e[t]=n||!0,e}),{})},Z(e)?(function(e,r){for(var t=0;t<r.length;t++){var n=r[t];"lang"===n.name&&(e.lang=n.value),"scoped"===n.name&&(e.scoped=!0),"module"===n.name&&(e.module=n.value||!0),"src"===n.name&&(e.src=n.value)}}(a,r),"style"===e?t.styles.push(a):t[e]=a):t.customBlocks.push(a)),i||n++},end:function(t,i){if(1===n&&a){a.end=i;var s=e.slice(a.start,a.end);!1!==r.deindent&&(s=o(s)),"template"!==a.type&&r.pad&&(s=function(r,t){if("space"===t)return e.slice(0,r.start).replace(J," ");var n=e.slice(0,r.start).split(Y).length,a="script"!==r.type||r.lang?"\n":"//\n";return Array(n).join(a)}(a,r.pad)+s),a.content=s,a=null}n--},outputSourceRange:r.outputSourceRange}),t},r.compile=on,r.compileToFunctions=sn,r.ssrCompile=Fn,r.ssrCompileToFunctions=Nn,r.generateCodeFrame=Xt,Object.defineProperty(r,"__esModule",{value:!0})})(r)}).call(this,t("yLpj"),t("URgk").setImmediate)},YBdB:function(e,r,t){(function(e,r){!function(e,t){"use strict";if(!e.setImmediate){var n,a,i,o,s,l=1,c={},u=!1,p=e.document,d=Object.getPrototypeOf&&Object.getPrototypeOf(e);d=d&&d.setTimeout?d:e,"[object process]"==={}.toString.call(e.process)?n=function(e){r.nextTick((function(){m(e)}))}:!function(){if(e.postMessage&&!e.importScripts){var r=!0,t=e.onmessage;return e.onmessage=function(){r=!1},e.postMessage("","*"),e.onmessage=t,r}}()?e.MessageChannel?((i=new MessageChannel).port1.onmessage=function(e){m(e.data)},n=function(e){i.port2.postMessage(e)}):p&&"onreadystatechange"in 
p.createElement("script")?(a=p.documentElement,n=function(e){var r=p.createElement("script");r.onreadystatechange=function(){m(e),r.onreadystatechange=null,a.removeChild(r),r=null},a.appendChild(r)}):n=function(e){setTimeout(m,0,e)}:(o="setImmediate$"+Math.random()+"$",s=function(r){r.source===e&&"string"==typeof r.data&&0===r.data.indexOf(o)&&m(+r.data.slice(o.length))},e.addEventListener?e.addEventListener("message",s,!1):e.attachEvent("onmessage",s),n=function(r){e.postMessage(o+r,"*")}),d.setImmediate=function(e){"function"!=typeof e&&(e=new Function(""+e));for(var r=new Array(arguments.length-1),t=0;t<r.length;t++)r[t]=arguments[t+1];var a={callback:e,args:r};return c[l]=a,n(l),l++},d.clearImmediate=f}function f(e){delete c[e]}function m(e){if(u)setTimeout(m,0,e);else{var r=c[e];if(r){u=!0;try{!function(e){var r=e.callback,n=e.args;switch(n.length){case 0:r();break;case 1:r(n[0]);break;case 2:r(n[0],n[1]);break;case 3:r(n[0],n[1],n[2]);break;default:r.apply(t,n)}}(r)}finally{f(e),u=!1}}}}}("undefined"==typeof self?void 0===e?this:e:self)}).call(this,t("yLpj"),t("8oxB"))},"aET+":function(e,r,t){var n,a,i={},o=(n=function(){return window&&document&&document.all&&!window.atob},function(){return void 0===a&&(a=n.apply(this,arguments)),a}),s=function(e,r){return r?r.querySelector(e):document.querySelector(e)},l=function(e){var r={};return function(e,t){if("function"==typeof e)return e();if(void 0===r[e]){var n=s.call(this,e,t);if(window.HTMLIFrameElement&&n instanceof window.HTMLIFrameElement)try{n=n.contentDocument.head}catch(e){n=null}r[e]=n}return r[e]}}(),c=null,u=0,p=[],d=t("9tPo");function f(e,r){for(var t=0;t<e.length;t++){var n=e[t],a=i[n.id];if(a){a.refs++;for(var o=0;o<a.parts.length;o++)a.parts[o](n.parts[o]);for(;o<n.parts.length;o++)a.parts.push(y(n.parts[o],r))}else{var s=[];for(o=0;o<n.parts.length;o++)s.push(y(n.parts[o],r));i[n.id]={id:n.id,refs:1,parts:s}}}}function m(e,r){for(var t=[],n={},a=0;a<e.length;a++){var i=e[a],o=r.base?i[0]+r.base:i[0],s={css:i[1],media:i[2],sourceMap:i[3]};n[o]?n[o].parts.push(s):t.push(n[o]={id:o,parts:[s]})}return t}function g(e,r){var t=l(e.insertInto);if(!t)throw new Error("Couldn't find a style target. 
This probably means that the value for the 'insertInto' parameter is invalid.");var n=p[p.length-1];if("top"===e.insertAt)n?n.nextSibling?t.insertBefore(r,n.nextSibling):t.appendChild(r):t.insertBefore(r,t.firstChild),p.push(r);else if("bottom"===e.insertAt)t.appendChild(r);else{if("object"!=typeof e.insertAt||!e.insertAt.before)throw new Error("[Style Loader]\n\n Invalid value for parameter 'insertAt' ('options.insertAt') found.\n Must be 'top', 'bottom', or Object.\n (https://github.com/webpack-contrib/style-loader#insertat)\n");var a=l(e.insertAt.before,t);t.insertBefore(r,a)}}function h(e){if(null===e.parentNode)return!1;e.parentNode.removeChild(e);var r=p.indexOf(e);r>=0&&p.splice(r,1)}function v(e){var r=document.createElement("style");if(void 0===e.attrs.type&&(e.attrs.type="text/css"),void 0===e.attrs.nonce){var n=function(){0;return t.nc}();n&&(e.attrs.nonce=n)}return b(r,e.attrs),g(e,r),r}function b(e,r){Object.keys(r).forEach((function(t){e.setAttribute(t,r[t])}))}function y(e,r){var t,n,a,i;if(r.transform&&e.css){if(!(i="function"==typeof r.transform?r.transform(e.css):r.transform.default(e.css)))return function(){};e.css=i}if(r.singleton){var o=u++;t=c||(c=v(r)),n=x.bind(null,t,o,!1),a=x.bind(null,t,o,!0)}else e.sourceMap&&"function"==typeof URL&&"function"==typeof URL.createObjectURL&&"function"==typeof URL.revokeObjectURL&&"function"==typeof Blob&&"function"==typeof btoa?(t=function(e){var r=document.createElement("link");return void 0===e.attrs.type&&(e.attrs.type="text/css"),e.attrs.rel="stylesheet",b(r,e.attrs),g(e,r),r}(r),n=E.bind(null,t,r),a=function(){h(t),t.href&&URL.revokeObjectURL(t.href)}):(t=v(r),n=k.bind(null,t),a=function(){h(t)});return n(e),function(r){if(r){if(r.css===e.css&&r.media===e.media&&r.sourceMap===e.sourceMap)return;n(e=r)}else a()}}e.exports=function(e,r){if("undefined"!=typeof DEBUG&&DEBUG&&"object"!=typeof document)throw new Error("The style-loader cannot be used in a non-browser environment");(r=r||{}).attrs="object"==typeof r.attrs?r.attrs:{},r.singleton||"boolean"==typeof r.singleton||(r.singleton=o()),r.insertInto||(r.insertInto="head"),r.insertAt||(r.insertAt="bottom");var t=m(e,r);return f(t,r),function(e){for(var n=[],a=0;a<t.length;a++){var o=t[a];(s=i[o.id]).refs--,n.push(s)}e&&f(m(e,r),r);for(a=0;a<n.length;a++){var s;if(0===(s=n[a]).refs){for(var l=0;l<s.parts.length;l++)s.parts[l]();delete i[s.id]}}}};var w,A=(w=[],function(e,r){return w[e]=r,w.filter(Boolean).join("\n")});function x(e,r,t,n){var a=t?"":n.css;if(e.styleSheet)e.styleSheet.cssText=A(r,a);else{var i=document.createTextNode(a),o=e.childNodes;o[r]&&e.removeChild(o[r]),o.length?e.insertBefore(i,o[r]):e.appendChild(i)}}function k(e,r){var t=r.css,n=r.media;if(n&&e.setAttribute("media",n),e.styleSheet)e.styleSheet.cssText=t;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(t))}}function E(e,r,t){var n=t.css,a=t.sourceMap,i=void 0===r.convertToAbsoluteUrls&&a;(r.convertToAbsoluteUrls||i)&&(n=d(n)),a&&(n+="\n/*# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(a))))+" */");var o=new Blob([n],{type:"text/css"}),s=e.href;e.href=URL.createObjectURL(o),s&&URL.revokeObjectURL(s)}},mUkR:function(e,r,t){"use strict";t.r(r);t("XOgp"),t("u+OR");$((function(){$('[data-toggle="tooltip"]').tooltip()}));var 
n=L.map("map").setView([10.6079209,105.1175397],11),a=document.getElementById("close-legend-btn"),i=document.getElementById("close-chart-btn"),o=document.getElementById("btn-legend"),s=document.getElementById("legend-panel"),l=document.getElementById("close-btn"),c=document.getElementById("panel"),u=document.getElementById("show-btn"),p=(document.getElementById("accept-new-info"),document.getElementById("panel-title")),d=document.getElementById("layer-content"),f=document.getElementById("info-content"),m=(document.getElementById("input-name"),document.getElementById("input-info"),document.getElementById("input-name-show")),g=document.getElementById("input-info-show"),h=document.getElementById("input-doan-show"),v=document.getElementById("input-mota-show"),b=document.getElementById("input-diadiem-show"),y=document.getElementById("input-chieudai-show"),w=document.getElementById("input-kcnguyhiem-show"),A=document.getElementById("input-kcnantoan-show"),x=document.getElementById("input-tocdo-show"),k=document.getElementById("input-mucdo-show"),E=document.getElementById("year-matcat"),q=document.getElementById("img-slider"),D=document.getElementById("swiper-container"),S=document.getElementById("container-chart"),T=document.getElementById("form-marker"),C=document.getElementById("form-doansl"),B="https://satlo.angiang.gov.vn/",R="https://satlo.angiang.gov.vn:8443/",F="",N=8,O=[],U=[],I=L.layerGroup(),V=L.layerGroup();U=L.layerGroup();new L.icon({iconUrl:"/images/icon-red.png",iconSize:[30,35],iconAnchor:[15,40],popupAnchor:[0,-40]});var _="https://satlo.angiang.gov.vn:8443/geoserver/angiang/wms",M="/storage/uploadedimages/",G=L.tileLayer("http://{s}.google.com/vt/lyrs=s,h&x={x}&y={y}&z={z}",{maxZoom:21,subdomains:["mt0","mt1","mt2","mt3"]}).addTo(n),P=L.tileLayer("https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token=pk.eyJ1IjoibWFwYm94IiwiYSI6ImNpejY4NXVycTA2emYycXBndHRqcmZ3N3gifQ.rJcFIG214AriISLbB6B5aw",{maxZoom:21,id:"mapbox.streets"}),j=L.control({position:"bottomleft"});j.onAdd=function(e){var r=L.DomUtil.create("div");return r.innerHTML='\n <div value="basemap" id="switchwrapper" class="switchwrapper">\n <figure id="googlepic" class="item-wrapper" style="display: none;">\n <figcaption class="item-title">\n <span class="item-text">Vệ tinh</span></figcaption>\n <img class="item-img" src="/images/earth-layer.png" alt="Bản đồ" title="Satellite">\n </figure>\n <figure id="basepic" class="item-wrapper" >\n <figcaption class="item-title">\n <span class="item-text">Bản đồ</span></figcaption>\n <img class="item-img" src="/images/base-layer.png" alt="Mapbox" title="Base map">\n </figure>\n </div>',r},j.addTo(n),document.getElementById("googlepic").addEventListener("click",(function(e){n.removeLayer(P),n.addLayer(G),G.setZIndex(-1),document.getElementById("switchwrapper").setAttribute("value","googlemap"),document.getElementById("googlepic").style.display="none",document.getElementById("basepic").style.display="block";for(var r=document.getElementsByClassName("buttonText"),t=0;t<r.length;t++)r[t].style.color="#ffff"})),document.getElementById("basepic").addEventListener("click",(function(e){n.removeLayer(G),n.addLayer(P),P.setZIndex(-1),document.getElementById("switchwrapper").setAttribute("value","basemap"),document.getElementById("googlepic").style.display="block",document.getElementById("basepic").style.display="none";for(var r=document.getElementsByClassName("buttonText"),t=0;t<r.length;t++)r[t].style.color="#000000"})),l.addEventListener("click",Y.bind(void 
0)),i.addEventListener("click",Y.bind(void 0)),u.addEventListener("click",J.bind(void 0)),o.addEventListener("click",function(){s.style.right="10px"}.bind(void 0)),a.addEventListener("click",function(){s.style.right="-450px"}.bind(void 0));var z,H=document.getElementsByClassName("caret");for(z=0;z<H.length;z++)H[z].addEventListener("click",(function(){this.parentElement.querySelector(".nested").classList.toggle("active"),this.classList.toggle("caret-down")}));function Y(){c.style.right="-450px",u.classList.remove("hidden"),D.classList.add("hidden"),S.classList.add("hidden")}function J(){c.style.right="10px",p.innerHTML="Các lớp layer",u.classList.add("hidden"),f.classList.add("hidden"),d.classList.remove("hidden")}function Z(e,r){r.on("click",(function(r){J(),S.classList.add("hidden");console.log(e.properties);var t=e.properties.Id;p.innerHTML="Thông tin điểm khảo sát",T.classList.remove("hidden"),C.classList.add("hidden"),d.classList.add("hidden"),f.classList.remove("hidden"),q.innerHTML="",m.value=e.properties.Name,g.value=e.properties.Info,null==e.properties.Photos?D.classList.add("hidden"):D.classList.remove("hidden");var n=[];null!=e.properties.Photos&&(n=JSON.parse(e.properties.Photos).img);for(var a=0;a<n.length;a++)Q(t,0==a,"img",n[a],"diemanhks")}))}function K(e,r){"Rất nguy hiểm"==e.properties.Mucdo?r.setStyle({color:"#ff3c00",weight:5}):"Nguy hiểm"==e.properties.Mucdo?r.setStyle({color:"#ffbf00",weight:5}):r.setStyle({color:"#ff7b00",weight:5}),r.on("mouseover",(function(e){this.setStyle({weight:N})})),r.on("mouseout",(function(e){this.setStyle({weight:5})})),r.on("click",(function(r){J(),S.classList.add("hidden");console.log(e.properties);var t=e.properties.Id;p.innerHTML="Thông tin điểm khảo sát",T.classList.add("hidden"),C.classList.remove("hidden"),d.classList.add("hidden"),f.classList.remove("hidden"),q.innerHTML="",h.value=e.properties.Name,v.value=e.properties.Info,b.value=e.properties.Diadiem,y.value=e.properties.Chieudai,w.value=e.properties.Kcnguyhiem,A.value=e.properties.Kcantoan,x.value=e.properties.Tocdo,k.value=e.properties.Mucdo,null==e.properties.Photos?D.classList.add("hidden"):D.classList.remove("hidden");var n=[];null!=e.properties.Photos&&(n=JSON.parse(e.properties.Photos).img);for(var a=0;a<n.length;a++)Q(t,0==a,"img",n[a],"doansl")}))}function Q(e,r,t,n,a){var i=document.createElement("div");i.className=1==r?"carousel-item active":"carousel-item";var o=document.createElement("img");o.className="d-block w-100",o.style.height="300px",o.src=M+a+"/"+e+"/"+t+"/"+n,i.appendChild(o),q.appendChild(i)}axios({method:"get",url:B+"api/get-all-data"}).then((function(e){var r=e.data.diemanhks,t=e.data.diemsl,a=e.data.doansl;console.log(e.data),O=[];for(var i=[],o=[],s=0;s<a.length;s++)a[s].mucdo,i.push({type:"Feature",geometry:{type:JSON.parse(a[s].st_asgeojson).type,coordinates:JSON.parse(a[s].st_asgeojson).coordinates},properties:{Name:a[s].tendoan,Info:a[s].mota,Diadiem:a[s].diadiem,Chieudai:a[s].chieudai,Kcnguyhiem:a[s].kc_nguyhiem,Kcantoan:a[s].kc_antoan,Tocdo:a[s].tocdo,Mucdo:a[s].mucdo,Id:a[s].gid,Photos:a[s].photos}});for(s=0;s<i.length;s++){var 
l=L.geoJson(i[s],{onEachFeature:K.bind(this)});V.addLayer(l)}for(s=0;s<t.length;s++)o.push({type:"Feature",geometry:{type:JSON.parse(t[s].st_asgeojson).type,coordinates:JSON.parse(t[s].st_asgeojson).coordinates},properties:{Name:t[s].name,Info:t[s].info,Id:t[s].gid,Photos:t[s].photos}});for(s=0;s<r.length;s++)O.push({type:"Feature",geometry:{type:JSON.parse(r[s].st_asgeojson).type,coordinates:JSON.parse(r[s].st_asgeojson).coordinates},properties:{Name:r[s].name,Info:r[s].info,Id:r[s].gid,Photos:r[s].photos}});for(s=0;s<O.length;s++){var c=L.geoJson(O[s],{onEachFeature:Z.bind(this)});U.addLayer(c)}I.addTo(n),V.addTo(n)}));var W=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:dangsau_2009_line",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),X=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Adiemdosau_2019_point&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),ee=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Adiemdosau_2009_point&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),re=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Asatlo_mohinhthuyluc_line&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),te=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Asatlo_truottongthe_line&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),ne=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Asatloduongbo_gis_line&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),ae=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:u_diem_mc_moi",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),ie=(L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:u_anh",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:u_diem_sat_lo",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21})),oe=(L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:doan_sat_lo",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:u_tram_do_thuy_van",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21})),se=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Adem_2009&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),le=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Adem_2019&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),ce=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:quy_hoach_khai_thac_cat_th",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),ue=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:duongbinhdo_dangsau2019",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}).addTo(n),pe=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:duongbinhdo_dangsau2009",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),de=L.tileLayer(R+"geoserver/gwc/service/wmt
s?layer=angiang%3Adu_bao_long_dan_2030&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),fe=L.tileLayer(R+"geoserver/gwc/service/wmts?layer=angiang%3Adu_bao_long_dan_2025&style=&tilematrixset=EPSG%3A900913&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fpng&TileMatrix=EPSG%3A900913%3A{z}&TileCol={x}&TileRow={y}"),me=L.tileLayer.wms(_,{Format:"image/png",Layers:"angiang:thuadat",Version:"1.1.1",Transparent:!0,SRS:"EPSG:900913",maxZoom:21}),ge={minZoom:10,maxZoom:16,opacity:1,attribution:'Rendered with <a href="http://www.maptiler.com/">MapTiler Desktop</a>',tms:!1},he=L.tileLayer("/storage/bandobosungvadieuchinh/{z}/{x}/{y}.png",ge),ve=L.tileLayer("/storage/bandophantich/{z}/{x}/{y}.png",ge),be=L.tileLayer("/storage/bandotheoketqua/{z}/{x}/{y}.png",ge);function ye(e,r,t){1==t?(t=$(this).is(":checked"),r.addLayer(e)):(t=$(this).is(":checked"),r.removeLayer(e))}$("#rungngapman").on("change",(function(){ye(U,n,this.checked)})),$("#bandobosung").on("change",(function(){ye(he,n,this.checked)})),$("#bandoketqua").on("change",(function(){ye(be,n,this.checked)})),$("#bandophantich").on("change",(function(){ye(ve,n,this.checked)})),$("#2009line").on("change",(function(){ye(W,n,this.checked)})),$("#diemdosau20019").on("change",(function(){ye(X,n,this.checked)})),$("#satlomohinhthuyluch").on("change",(function(){ye(re,n,this.checked)})),$("#diemdosau").on("change",(function(){ye(ee,n,this.checked)})),$("#satlotruottongthe").on("change",(function(){ye(te,n,this.checked)})),$("#satloduongbo").on("change",(function(){ye(ne,n,this.checked)})),$("#diemmatcatmoi").on("change",(function(){ye(ae,n,this.checked)})),$("#diemsatlo").on("change",(function(){ye(ie,n,this.checked)})),$("#doansatlo").on("change",(function(){ye(V,n,this.checked)})),$("#tramdothuyvan").on("change",(function(){ye(oe,n,this.checked)})),$("#dem_2009").on("change",(function(){ye(se,n,this.checked)})),$("#dem_2019").on("change",(function(){ye(le,n,this.checked)})),$("#quy_hoach_khai_thac_cat_th").on("change",(function(){ye(ce,n,this.checked)})),$("#duongbinhdo_dangsau2019").on("change",(function(){ye(ue,n,this.checked)})),$("#duongbinhdo_dangsau2009").on("change",(function(){ye(pe,n,this.checked)})),$("#du_bao_long_dan_2030").on("change",(function(){ye(de,n,this.checked)})),$("#du_bao_long_dan_2025").on("change",(function(){ye(fe,n,this.checked)})),$("#thuadat").on("change",(function(){ye(me,n,this.checked)})),$("#vantocnam2010").on("change",(function(){ye(vantocnam2010,n,this.checked)})),$("#vantocnam2013").on("change",(function(){ye(vantocnam2013,n,this.checked)})),n.addControl(new L.Control.Fullscreen);L.control.ruler({lengthUnit:{factor:1e3,display:"m",decimal:2,label:"Khoảng cách"}}).addTo(n),L.control.locate().addTo(n),n.pm.addControls({position:"topleft",drawCircle:!1});var we=L.esri.Geocoding.geosearch().addTo(n),Ae=2009,xe=document.getElementsByClassName("leaflet-pm-icon-polyline")[0],ke=document.getElementById("btn-mc-2009"),Ee=document.getElementById("btn-mc-2019");ke.addEventListener("click",(function(){xe.click(),Ae=2009,E.innerHTML="2009"})),Ee.addEventListener("click",(function(){xe.click(),Ae=2019,E.innerHTML="2019"}));var qe=L.layerGroup().addTo(n);we.on("results",(function(e){qe.clearLayers();for(var r=e.results.length-1;r>=0;r--)qe.addLayer(L.marker(e.results[r].latlng))})),n.on("pm:create",(function(e){console.log(e.layer),n.removeLayer(e.layer);var 
r="",t=B+"api/get-matcat";t=2009==Ae?B+"api/get-matcat/dem2009":B+"api/get-matcat/dem2019";for(var a=new FormData,i=e.layer.getLatLngs(),o=0;o<i.length;o++)o==i.length-1?r+=i[o].lng+" "+i[o].lat:r+=i[o].lng+" "+i[o].lat+",";F=r,a.set("linestring",F),axios({method:"post",url:t,data:a,headers:{"Content-Type":"multipart/form-data"}}).then((function(e){alert("Biểu đồ mặt cắt được cập nhật"),e.data[0].values.length<1?(S.classList.add("hidden"),alert("Vui lòng vẽ lại mặt cắt")):S.classList.remove("hidden"),function(e){document.getElementById("chart").innerHTML="";var r=e.data,t=window.matchMedia("(max-width: 1600px)"),n=800,a=300;t.matches&&(n=400,a=200);var i=0,o=0;r.forEach((function(e){e.values.forEach((function(e){i=e.x>i?e.x:i,o=e.y<o?e.y:o}))}));var s=d3.scaleLinear().domain([0,i]).range([0,n-50]),l=d3.scaleLinear().domain([o,0]).range([a-50,0]),c=d3.scaleOrdinal(d3.schemeCategory10),u=d3.select("#chart").append("svg").attr("width",n+50+"px").attr("height",a+50+"px").append("g").attr("transform","translate(".concat(50,", ").concat(50,")")),p=d3.line().x((function(e){return s(e.x)})).y((function(e){return l(e.y)})),d=u.append("g").attr("class","lines");d.selectAll(".line-group").data(r).enter().append("g").attr("class","line-group").on("mouseover",(function(e,r){u.append("text").attr("class","title-text").style("fill",c(r)).text(e.dem).attr("text-anchor","middle").attr("x",(n-50)/2).attr("y",5)})).on("mouseout",(function(e){u.select(".title-text").remove()})).append("path").attr("class","line").attr("d",(function(e){return p(e.values)})).style("stroke",(function(e,r){return c(r)})).style("opacity","0.25").on("mouseover",(function(e){d3.selectAll(".line").style("opacity","0.1"),d3.selectAll(".circle").style("opacity","0.25"),d3.select(this).style("opacity","0.85").style("stroke-width","2.5px").style("cursor","pointer")})).on("mouseout",(function(e){d3.selectAll(".line").style("opacity","0.25"),d3.selectAll(".circle").style("opacity","0.85"),d3.select(this).style("stroke-width","1.5px").style("cursor","none")})),d.selectAll("circle-group").data(r).enter().append("g").style("fill",(function(e,r){return c(r)})).selectAll("circle").data((function(e){return e.values})).enter().append("g").attr("class","circle").on("mouseover",(function(e){d3.select(this).style("cursor","pointer").append("text").attr("class","text").text("".concat(e.y)).attr("x",(function(e){return s(e.x)+5})).attr("y",(function(e){return l(e.y)-10}))})).on("mouseout",(function(e){d3.select(this).style("cursor","none").transition().duration(250).selectAll(".text").remove()})).append("circle").attr("cx",(function(e){return s(e.x)})).attr("cy",(function(e){return l(e.y)})).attr("r",3).style("opacity","0.85").on("mouseover",(function(e){d3.select(this).transition().duration(250).attr("r",6)})).on("mouseout",(function(e){d3.select(this).transition().duration(250).attr("r",3)}));var f=d3.axisBottom(s).ticks(10),m=d3.axisLeft(l).ticks(10);u.append("g").attr("class","x axis").attr("transform","translate(0, ".concat(a-50,")")).call(f).append("text").attr("transform","translate("+n/2+" ,40)").style("text-anchor","middle").attr("fill","#000").html("Khoảng cách (m)"),u.append("g").attr("class","y axis").call(m).append("text").attr("transform","rotate(-90)").attr("y",-50).attr("x",0-a/2).attr("dy","1em").style("text-anchor","middle").attr("fill","#000").html("Độ sâu (m)")}(e)})).catch((function(e){console.log(e)}))}))},"u+OR":function(e,r,t){var n=t("PMrG");"string"==typeof n&&(n=[[e.i,n,""]]);var a={hmr:!0,transform:void 0,insertInto:void 
0};t("aET+")(n,a);n.locals&&(e.exports=n.locals)},yLpj:function(e,r){var t;t=function(){return this}();try{t=t||new Function("return this")()}catch(e){"object"==typeof window&&(t=window)}e.exports=t}}); | i |
table.rs | use crate::commands::WholeStreamCommand;
use crate::format::TableView;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use std::time::Instant;
const STREAM_PAGE_SIZE: usize = 1000;
const STREAM_TIMEOUT_CHECK_INTERVAL: usize = 100;
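// Rendering is paged: up to STREAM_PAGE_SIZE rows are buffered before a table
// is printed, and every STREAM_TIMEOUT_CHECK_INTERVAL rows the elapsed time is
// checked so that a slow input stream still flushes roughly once per second.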
pub struct Table;
impl WholeStreamCommand for Table {
fn name(&self) -> &str {
"table"
}
fn signature(&self) -> Signature {
Signature::build("table").named(
"start_number",
SyntaxShape::Number,
"row number to start viewing from",
Some('n'),
)
}
fn usage(&self) -> &str |
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
table(args, registry)
}
}
fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let mut args = args.evaluate_once(registry)?;
let mut finished = false;
let stream = async_stream! {
let host = args.host.clone();
let mut start_number = match args.get("start_number") {
Some(Value { value: UntaggedValue::Primitive(Primitive::Int(i)), .. }) => {
if let Some(num) = i.to_usize() {
num
} else {
yield Err(ShellError::labeled_error("Expected a row number", "expected a row number", &args.args.call_info.name_tag));
0
}
}
_ => {
0
}
};
let mut delay_slot = None;
while !finished {
let mut new_input: VecDeque<Value> = VecDeque::new();
let start_time = Instant::now();
for idx in 0..STREAM_PAGE_SIZE {
if let Some(val) = delay_slot {
new_input.push_back(val);
delay_slot = None;
} else {
match args.input.next().await {
Some(a) => {
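                            // A row whose column descriptors differ from the rows
                            // buffered so far closes the current page; the row is
                            // parked in delay_slot to open the next page.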
if !new_input.is_empty() {
if let Some(descs) = new_input.get(0) {
let descs = descs.data_descriptors();
let compare = a.data_descriptors();
if descs != compare {
delay_slot = Some(a);
break;
} else {
new_input.push_back(a);
}
} else {
new_input.push_back(a);
}
} else {
new_input.push_back(a);
}
}
_ => {
finished = true;
break;
}
}
// Check if we've gone over our buffering threshold
if (idx + 1) % STREAM_TIMEOUT_CHECK_INTERVAL == 0 {
let end_time = Instant::now();
// If we've been buffering over a second, go ahead and send out what we have so far
if (end_time - start_time).as_secs() >= 1 {
break;
}
}
}
}
let input: Vec<Value> = new_input.into();
            if !input.is_empty() {
let mut host = host.lock();
let view = TableView::from_list(&input, start_number);
if let Some(view) = view {
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
}
}
start_number += input.len();
}
// Needed for async_stream to type check
if false {
yield ReturnSuccess::value(UntaggedValue::nothing().into_value(Tag::unknown()));
}
};
Ok(OutputStream::new(stream))
}
| {
"View the contents of the pipeline as a table."
} |
types.go | package weave
import (
"fmt"
. "github.com/kocircuit/kocircuit/lang/circuit/model"
. "github.com/kocircuit/kocircuit/lang/go/eval"
. "github.com/kocircuit/kocircuit/lang/go/eval/symbol"
"github.com/kocircuit/kocircuit/lang/go/runtime"
)
func init() |
type WeaveStepCtx struct {
Origin *Span `ko:"name=origin"` // evaluation span (not weave span)
Pkg string `ko:"name=pkg"`
Func string `ko:"name=func"`
Step string `ko:"name=step"`
Logic string `ko:"name=logic"`
Source string `ko:"name=source"`
Ctx Symbol `ko:"name=ctx"` // user ctx object
}
func (ctx *WeaveStepCtx) DelegateSpan() *Span {
return RefineOutline(ctx.Origin, fmt.Sprintf("%s @ %s", ctx.Logic, ctx.Source))
}
func (ctx *WeaveStepCtx) Deconstruct(span *Span) Symbol {
return MakeStructSymbol(
FieldSymbols{
{Name: "pkg", Value: BasicSymbol{Value: ctx.Pkg}},
{Name: "func", Value: BasicSymbol{Value: ctx.Func}},
{Name: "step", Value: BasicSymbol{Value: ctx.Step}},
{Name: "logic", Value: BasicSymbol{Value: ctx.Logic}},
{Name: "source", Value: BasicSymbol{Value: ctx.Source}},
{Name: "ctx", Value: ctx.Ctx},
},
)
}
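// Illustrative note (an assumption, not taken from this file): Deconstruct
// mirrors the Go-side context into a Ko struct symbol, so a weave step sees a
// value shaped like (pkg: "some/pkg", func: "F", step: "s", logic: ..., ctx: ...).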
type WeaveField struct {
Name string `ko:"name=name"`
Monadic bool `ko:"name=monadic"`
Objects Symbol `ko:"name=objects"`
}
func (field *WeaveField) Deconstruct(span *Span) Symbol {
return MakeStructSymbol(
FieldSymbols{
{Name: "name", Value: BasicSymbol{Value: field.Name}},
{Name: "monadic", Value: BasicSymbol{Value: field.Monadic}},
{Name: "objects", Value: field.Objects},
},
)
}
type WeaveFields []*WeaveField
func (bf WeaveFields) Deconstruct(span *Span) (Symbol, error) {
elem := make(Symbols, len(bf))
for i := range bf {
elem[i] = bf[i].Deconstruct(span)
}
return MakeSeriesSymbol(span, elem)
}
type WeaveFigure struct {
Int64 *int64 `ko:"name=int64"`
String *string `ko:"name=string"`
Bool *bool `ko:"name=bool"`
Float64 *float64 `ko:"name=float64"`
Transform *WeaveTransform `ko:"name=transform"`
}
func (fig *WeaveFigure) Play(*runtime.Context) *WeaveFigure {
return fig
}
type WeaveTransform struct {
Operator *WeaveOperator `ko:"name=operator"`
Func *WeaveFunc `ko:"name=func"`
}
func (w *WeaveTransform) Play(*runtime.Context) *WeaveTransform {
return w
}
type WeaveOperator struct {
Pkg string `ko:"name=pkg"`
Name string `ko:"name=name"`
}
func (w *WeaveOperator) Play(*runtime.Context) *WeaveOperator {
return w
}
type WeaveFunc struct {
Pkg string `ko:"name=pkg"`
Name string `ko:"name=name"`
}
func (w *WeaveFunc) Play(*runtime.Context) *WeaveFunc {
return w
}
func (fig *WeaveFigure) Deconstruct(span *Span) Symbol {
return DeconstructInterface(span, fig)
}
type WeaveResidue struct {
Step string `ko:"name=step"`
Logic string `ko:"name=logic"`
Source string `ko:"name=source"`
Returns Symbol `ko:"name=returns"`
Effect Symbol `ko:"name=effect"`
}
func (residue *WeaveResidue) Deconstruct(span *Span) Symbol {
return MakeStructSymbol(
FieldSymbols{
{Name: "step", Value: BasicSymbol{Value: residue.Step}},
{Name: "logic", Value: BasicSymbol{Value: residue.Logic}},
{Name: "source", Value: BasicSymbol{Value: residue.Source}},
{Name: "returns", Value: residue.Returns},
{Name: "effect", Value: residue.Effect},
},
)
}
type WeaveResidues []*WeaveResidue
func (br WeaveResidues) Deconstruct(span *Span) (Symbol, error) {
elem := make(Symbols, len(br))
for i := range br {
elem[i] = br[i].Deconstruct(span)
}
return MakeSeriesSymbol(span, elem)
}
type WeaveSummary struct {
Origin *Span `ko:"name=origin"` // evaluation span (not weave span)
Pkg string `ko:"name=pkg"`
Func string `ko:"name=func"`
Source string `ko:"name=source"`
Ctx Symbol `ko:"name=ctx"` // user ctx object
Arg Symbol `ko:"name=arg"`
Returns Symbol `ko:"name=returns"`
}
func (summary *WeaveSummary) CombineSpan() *Span {
return RefineOutline(summary.Origin, fmt.Sprintf("COMBINE @ %s", summary.Source))
}
func (summary *WeaveSummary) Deconstruct(span *Span) Symbol {
return MakeStructSymbol(
FieldSymbols{
{Name: "pkg", Value: BasicSymbol{Value: summary.Pkg}},
{Name: "func", Value: BasicSymbol{Value: summary.Func}},
{Name: "source", Value: BasicSymbol{Value: summary.Source}},
{Name: "ctx", Value: summary.Ctx},
{Name: "arg", Value: summary.Arg},
{Name: "returns", Value: summary.Returns},
},
)
}
type WeaveStepResult struct {
Returns Symbol `ko:"name=returns"`
Effect Symbol `ko:"name=effect"`
}
| {
RegisterEvalGateAt("", "WeaveFigure", new(WeaveFigure))
RegisterEvalGateAt("", "WeaveTransform", new(WeaveTransform))
RegisterEvalGateAt("", "WeaveFunc", new(WeaveFunc))
RegisterEvalGateAt("", "WeaveOperator", new(WeaveOperator))
} |
bad-assoc-ty.rs | type A = [u8; 4]::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
type B = [u8]::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
type C = (u8)::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
type D = (u8, u8)::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
type E = _::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR the placeholder `_` is not allowed within types on item signatures for type aliases
type F = &'static (u8)::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
// Qualified paths cannot appear in bounds, so the recovery
// should apply to the whole sum and not `(Send)`.
type G = dyn 'static + (Send)::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
// This is actually a legal path with fn-like generic arguments in the middle!
// Recovery should not apply in this context.
type H = Fn(u8) -> (u8)::Output;
//~^ ERROR ambiguous associated type
//~| WARN trait objects without an explicit `dyn` are deprecated
//~| WARN this is accepted in the current edition
macro_rules! ty {
($ty: ty) => ($ty::AssocTy);
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
() => (u8);
}
type J = ty!(u8);
type I = ty!()::AssocTy;
//~^ ERROR missing angle brackets in associated item path
//~| ERROR ambiguous associated type
trait K<A, B> {}
fn foo<X: K<_, _>>(x: X) {}
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for functions
fn bar<F>(_: F) where F: Fn() -> _ {}
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for functions
fn baz<F: Fn() -> _>(_: F) |
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for functions
struct L<F>(F) where F: Fn() -> _;
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for structs
struct M<F> where F: Fn() -> _ {
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for structs
a: F,
}
enum N<F> where F: Fn() -> _ {
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for enums
Foo(F),
}
union O<F> where F: Fn() -> _ {
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for unions
foo: F,
}
trait P<F> where F: Fn() -> _ {
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for traits
}
trait Q {
fn foo<F>(_: F) where F: Fn() -> _ {}
//~^ ERROR the placeholder `_` is not allowed within types on item signatures for functions
}
fn main() {}
| {} |
exclusions.py | # testing/exclusions.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import contextlib
import operator
import re
from . import config
from .. import util
from ..util import decorator
from ..util.compat import inspect_getfullargspec
def skip_if(predicate, reason=None):
rule = compound()
pred = _as_predicate(predicate, reason)
rule.skips.add(pred)
return rule
def fails_if(predicate, reason=None):
rule = compound()
pred = _as_predicate(predicate, reason)
rule.fails.add(pred)
return rule
class compound(object):
def __init__(self):
self.fails = set()
self.skips = set()
self.tags = set()
self.combinations = {}
def __add__(self, other):
return self.add(other)
def with_combination(self, **kw):
copy = compound()
copy.fails.update(self.fails)
copy.skips.update(self.skips)
copy.tags.update(self.tags)
copy.combinations.update((f, kw) for f in copy.fails)
copy.combinations.update((s, kw) for s in copy.skips)
return copy
def add(self, *others):
copy = compound()
copy.fails.update(self.fails)
copy.skips.update(self.skips)
copy.tags.update(self.tags)
for other in others:
copy.fails.update(other.fails)
copy.skips.update(other.skips)
copy.tags.update(other.tags)
return copy
def not_(self):
copy = compound()
copy.fails.update(NotPredicate(fail) for fail in self.fails)
copy.skips.update(NotPredicate(skip) for skip in self.skips)
copy.tags.update(self.tags)
return copy
@property
def enabled(self):
return self.enabled_for_config(config._current)
def enabled_for_config(self, config):
for predicate in self.skips.union(self.fails):
if predicate(config):
return False
else:
return True
def matching_config_reasons(self, config):
return [
predicate._as_string(config)
for predicate in self.skips.union(self.fails)
if predicate(config)
]
def include_test(self, include_tags, exclude_tags):
return bool(
not self.tags.intersection(exclude_tags)
and (not include_tags or self.tags.intersection(include_tags))
)
def _extend(self, other):
self.skips.update(other.skips)
self.fails.update(other.fails)
self.tags.update(other.tags)
self.combinations.update(other.combinations)
def __call__(self, fn):
if hasattr(fn, "_sa_exclusion_extend"):
fn._sa_exclusion_extend._extend(self)
return fn
@decorator
def decorate(fn, *args, **kw):
return self._do(config._current, fn, *args, **kw)
decorated = decorate(fn)
decorated._sa_exclusion_extend = self
return decorated
@contextlib.contextmanager
def fail_if(self):
all_fails = compound()
all_fails.fails.update(self.skips.union(self.fails))
try:
yield
except Exception as ex:
all_fails._expect_failure(config._current, ex, None)
else:
all_fails._expect_success(config._current, None)
def _check_combinations(self, combination, predicate):
if predicate in self.combinations:
for k, v in combination:
if (
k in self.combinations[predicate]
and self.combinations[predicate][k] != v
):
return False
return True
def _do(self, cfg, fn, *args, **kw):
if len(args) > 1:
insp = inspect_getfullargspec(fn)
combination = list(zip(insp.args[1:], args[1:]))
else:
combination = None
for skip in self.skips:
if self._check_combinations(combination, skip) and skip(cfg):
msg = "'%s' : %s" % (
config.get_current_test_name(),
skip._as_string(cfg),
)
config.skip_test(msg)
try:
return_value = fn(*args, **kw)
except Exception as ex:
self._expect_failure(cfg, ex, combination, name=fn.__name__)
else:
self._expect_success(cfg, combination, name=fn.__name__)
return return_value
def _expect_failure(self, config, ex, combination, name="block"):
for fail in self.fails:
if self._check_combinations(combination, fail) and fail(config):
if util.py2k:
str_ex = unicode(ex).encode("utf-8", errors="ignore")
else:
str_ex = str(ex)
print(
(
"%s failed as expected (%s): %s "
% (name, fail._as_string(config), str_ex)
)
)
break
else:
util.raise_from_cause(ex)
def _expect_success(self, config, combination, name="block"):
if not self.fails:
return
for fail in self.fails:
if self._check_combinations(combination, fail) and fail(config):
raise AssertionError(
"Unexpected success for '%s' (%s)"
% (
name,
" and ".join(
fail._as_string(config) for fail in self.fails
),
)
)
def requires_tag(tagname):
return tags([tagname])
def tags(tagnames):
comp = compound()
comp.tags.update(tagnames)
return comp
def only_if(predicate, reason=None):
predicate = _as_predicate(predicate)
return skip_if(NotPredicate(predicate), reason)
def succeeds_if(predicate, reason=None):
predicate = _as_predicate(predicate)
return fails_if(NotPredicate(predicate), reason)
class Predicate(object):
@classmethod
def as_predicate(cls, predicate, description=None):
if isinstance(predicate, compound):
return cls.as_predicate(predicate.enabled_for_config, description)
elif isinstance(predicate, Predicate):
if description and predicate.description is None:
predicate.description = description
return predicate
elif isinstance(predicate, (list, set)):
return OrPredicate(
[cls.as_predicate(pred) for pred in predicate], description
)
elif isinstance(predicate, tuple):
return SpecPredicate(*predicate)
elif isinstance(predicate, util.string_types):
tokens = re.match(
r"([\+\w]+)\s*(?:(>=|==|!=|<=|<|>)\s*([\d\.]+))?", predicate
)
if not tokens:
raise ValueError(
"Couldn't locate DB name in predicate: %r" % predicate
)
db = tokens.group(1)
op = tokens.group(2)
spec = (
tuple(int(d) for d in tokens.group(3).split("."))
if tokens.group(3)
else None
)
return SpecPredicate(db, op, spec, description=description)
elif util.callable(predicate):
return LambdaPredicate(predicate, description)
else:
assert False, "unknown predicate type: %s" % predicate
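    # Hedged examples (not part of the original module) of the accepted shapes:
    #   as_predicate("postgresql>=9.4")      -> SpecPredicate("postgresql", ">=", (9, 4))
    #   as_predicate(("mysql", "<", (5, 6))) -> SpecPredicate("mysql", "<", (5, 6))
    #   as_predicate(lambda config: True)    -> LambdaPredicate(...)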
def _format_description(self, config, negate=False):
bool_ = self(config)
if negate:
            bool_ = not bool_
return self.description % {
"driver": config.db.url.get_driver_name()
if config
else "<no driver>",
"database": config.db.url.get_backend_name()
if config
else "<no database>",
"doesnt_support": "doesn't support" if bool_ else "does support",
"does_support": "does support" if bool_ else "doesn't support",
}
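    # Hedged example (format assumed from the keys above): a description such as
    #   "%(database)s %(doesnt_support)s savepoints"
    # renders as "sqlite doesn't support savepoints" when the predicate holds
    # for the current config.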
def _as_string(self, config=None, negate=False):
raise NotImplementedError()
class BooleanPredicate(Predicate):
def __init__(self, value, description=None):
self.value = value
self.description = description or "boolean %s" % value
def __call__(self, config):
return self.value
def _as_string(self, config, negate=False):
return self._format_description(config, negate=negate)
class SpecPredicate(Predicate):
def __init__(self, db, op=None, spec=None, description=None):
self.db = db
self.op = op
self.spec = spec
self.description = description
_ops = {
"<": operator.lt,
">": operator.gt,
"==": operator.eq,
"!=": operator.ne,
"<=": operator.le,
">=": operator.ge,
"in": operator.contains,
"between": lambda val, pair: val >= pair[0] and val <= pair[1],
}
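    # Hedged note: string predicates can only express the comparison operators
    # above; "in" and "between" are reachable by constructing SpecPredicate
    # directly, e.g. SpecPredicate("sqlite", "between", ((3, 20, 0), (3, 30, 0))).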
def __call__(self, config):
engine = config.db
if "+" in self.db:
dialect, driver = self.db.split("+")
else:
dialect, driver = self.db, None
if dialect and engine.name != dialect:
return False
if driver is not None and engine.driver != driver:
return False
if self.op is not None:
assert driver is None, "DBAPI version specs not supported yet"
version = _server_version(engine)
            oper = self.op if callable(self.op) else self._ops[self.op]
return oper(version, self.spec)
else:
return True
def _as_string(self, config, negate=False):
if self.description is not None:
return self._format_description(config)
elif self.op is None:
if negate:
return "not %s" % self.db
else:
return "%s" % self.db
else:
if negate:
return "not %s %s %s" % (self.db, self.op, self.spec)
else:
return "%s %s %s" % (self.db, self.op, self.spec)
class LambdaPredicate(Predicate):
def __init__(self, lambda_, description=None, args=None, kw=None):
spec = inspect_getfullargspec(lambda_)
if not spec[0]:
self.lambda_ = lambda db: lambda_()
else:
self.lambda_ = lambda_
self.args = args or ()
self.kw = kw or {}
if description:
self.description = description
elif lambda_.__doc__:
self.description = lambda_.__doc__
else:
self.description = "custom function"
def __call__(self, config):
return self.lambda_(config)
def _as_string(self, config, negate=False):
return self._format_description(config)
class NotPredicate(Predicate):
def __init__(self, predicate, description=None):
self.predicate = predicate
self.description = description
def __call__(self, config):
return not self.predicate(config)
def _as_string(self, config, negate=False):
if self.description:
return self._format_description(config, not negate)
else:
return self.predicate._as_string(config, not negate)
class OrPredicate(Predicate):
def __init__(self, predicates, description=None):
self.predicates = predicates
self.description = description
def __call__(self, config):
for pred in self.predicates:
if pred(config):
return True
return False
def _eval_str(self, config, negate=False):
if negate:
conjunction = " and "
else:
conjunction = " or "
return conjunction.join(
p._as_string(config, negate=negate) for p in self.predicates
)
def _negation_str(self, config):
if self.description is not None:
return "Not " + self._format_description(config)
else:
return self._eval_str(config, negate=True)
def _as_string(self, config, negate=False):
if negate:
return self._negation_str(config)
else:
if self.description is not None:
return self._format_description(config)
else:
return self._eval_str(config)
_as_predicate = Predicate.as_predicate
def _is_excluded(db, op, spec):
return SpecPredicate(db, op, spec)(config._current)
def _server_version(engine):
"""Return a server_version_info tuple."""
# force metadata to be retrieved
conn = engine.connect()
version = getattr(engine.dialect, "server_version_info", None)
if version is None:
version = ()
conn.close()
return version
def db_spec(*dbs):
return OrPredicate([Predicate.as_predicate(db) for db in dbs])
def open(): # noqa
return skip_if(BooleanPredicate(False, "mark as execute"))
def closed():
return skip_if(BooleanPredicate(True, "marked as skip"))
def fails(reason=None):
return fails_if(BooleanPredicate(True, reason or "expected to fail"))
@decorator
def future(fn, *arg):
return fails_if(LambdaPredicate(fn), "Future feature")
def fails_on(db, reason=None):
return fails_if(db, reason)
def fails_on_everything_except(*dbs):
return succeeds_if(OrPredicate([Predicate.as_predicate(db) for db in dbs]))
def skip(db, reason=None):
|
def only_on(dbs, reason=None):
return only_if(
OrPredicate(
[Predicate.as_predicate(db, reason) for db in util.to_list(dbs)]
)
)
def exclude(db, op, spec, reason=None):
return skip_if(SpecPredicate(db, op, spec), reason)
def against(config, *queries):
assert queries, "no queries sent!"
return OrPredicate([Predicate.as_predicate(query) for query in queries])(
config
)
| return skip_if(db, reason) |
python.py | import gevent
import structlog
from eth_utils import is_binary_address, is_hex, to_bytes, to_checksum_address
from gevent import Greenlet
import raiden.blockchain.events as blockchain_events
from raiden import waiting
from raiden.constants import (
GENESIS_BLOCK_NUMBER,
RED_EYES_PER_TOKEN_NETWORK_LIMIT,
SECRET_HEXSTRING_LENGTH,
SECRETHASH_HEXSTRING_LENGTH,
UINT256_MAX,
Environment,
)
from raiden.exceptions import (
AlreadyRegisteredTokenAddress,
ChannelNotFound,
DepositMismatch,
DepositOverLimit,
DuplicatedChannelError,
InsufficientFunds,
InsufficientGasReserve,
InvalidAddress,
InvalidAmount,
InvalidSecretOrSecretHash,
InvalidSettleTimeout,
RaidenRecoverableError,
TokenNotRegistered,
UnknownTokenAddress,
)
from raiden.messages import RequestMonitoring
from raiden.settings import DEFAULT_RETRY_TIMEOUT, DEVELOPMENT_CONTRACT_VERSION
from raiden.transfer import architecture, views
from raiden.transfer.events import (
EventPaymentReceivedSuccess,
EventPaymentSentFailed,
EventPaymentSentSuccess,
)
from raiden.transfer.state import (
BalanceProofSignedState,
InitiatorTask,
MediatorTask,
NettingChannelState,
TargetTask,
TransferTask,
)
from raiden.transfer.state_change import ActionChannelClose
from raiden.utils import pex, sha3
from raiden.utils.gas_reserve import has_enough_gas_reserve
from raiden.utils.typing import (
Address,
Any,
BlockSpecification,
BlockTimeout,
ChannelID,
Dict,
List,
LockedTransferType,
NetworkTimeout,
Optional,
PaymentID,
PaymentNetworkID,
Secret,
SecretHash,
Set,
TokenAddress,
TokenAmount,
TokenNetworkAddress,
TokenNetworkID,
Tuple,
)
log = structlog.get_logger(__name__) # pylint: disable=invalid-name
EVENTS_PAYMENT_HISTORY_RELATED = (
EventPaymentSentSuccess,
EventPaymentSentFailed,
EventPaymentReceivedSuccess,
)
def event_filter_for_payments(
event: architecture.Event,
token_network_identifier: TokenNetworkID = None,
partner_address: Address = None,
) -> bool:
"""Filters out non payment history related events
- If no other args are given, all payment related events match
- If a token network identifier is given then only payment events for that match
    - If a partner is also given, a payment sent event matches only when its
    target is the partner, and a payment received event only when its
    initiator is the partner.
"""
is_matching_event = isinstance(event, EVENTS_PAYMENT_HISTORY_RELATED) and (
token_network_identifier is None
or token_network_identifier == event.token_network_identifier
)
if not is_matching_event:
return False
sent_and_target_matches = isinstance(
event, (EventPaymentSentFailed, EventPaymentSentSuccess)
) and (partner_address is None or event.target == partner_address)
received_and_initiator_matches = isinstance(event, EventPaymentReceivedSuccess) and (
partner_address is None or event.initiator == partner_address
)
return sent_and_target_matches or received_and_initiator_matches
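# Hedged usage sketch (names here are hypothetical, not from this module): given
# an iterable `events` of architecture.Event objects, keep only the payments
# involving a particular partner:
#
#     relevant = [
#         event for event in events
#         if event_filter_for_payments(event, partner_address=partner)
#     ]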
def flatten_transfer(transfer: LockedTransferType, role: str) -> Dict[str, Any]:
return {
"payment_identifier": str(transfer.payment_identifier),
"token_address": to_checksum_address(transfer.token),
"token_network_identifier": to_checksum_address(
transfer.balance_proof.token_network_identifier
),
"channel_identifier": str(transfer.balance_proof.channel_identifier),
"initiator": to_checksum_address(transfer.initiator),
"target": to_checksum_address(transfer.target),
"transferred_amount": str(transfer.balance_proof.transferred_amount),
"locked_amount": str(transfer.balance_proof.locked_amount),
"role": role,
}
def get_transfer_from_task(
secrethash: SecretHash, transfer_task: TransferTask
) -> Tuple[Optional[LockedTransferType], str]:
    role = views.role_from_transfer_task(transfer_task)
    transfer: Optional[LockedTransferType] = None
if isinstance(transfer_task, InitiatorTask):
transfer = transfer_task.manager_state.initiator_transfers[secrethash].transfer
elif isinstance(transfer_task, MediatorTask):
pairs = transfer_task.mediator_state.transfers_pair
if pairs:
transfer = pairs[-1].payer_transfer
elif transfer_task.mediator_state.waiting_transfer:
transfer = transfer_task.mediator_state.waiting_transfer.transfer
elif isinstance(transfer_task, TargetTask):
transfer = transfer_task.target_state.transfer
else:
raise ValueError("get_tranfer_from_task for a non TransferTask argument")
return transfer, role
def transfer_tasks_view(
transfer_tasks: Dict[SecretHash, TransferTask],
token_address: TokenAddress = None,
channel_id: ChannelID = None,
) -> List[Dict[str, Any]]:
view = list()
for secrethash, transfer_task in transfer_tasks.items():
transfer, role = get_transfer_from_task(secrethash, transfer_task)
if transfer is None:
continue
if token_address is not None:
if transfer.token != token_address:
continue
elif channel_id is not None:
if transfer.balance_proof.channel_identifier != channel_id:
continue
view.append(flatten_transfer(transfer, role))
return view
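# Illustrative note (an assumption about intended use): each dict in the view
# carries the keys built by flatten_transfer above, e.g.
#   {"payment_identifier": "7", "role": "mediator", "token_address": "0x...", ...}
# so the result stays JSON-serializable.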
class RaidenAPI:
# pylint: disable=too-many-public-methods
def __init__(self, raiden):
self.raiden = raiden
@property
def address(self):
return self.raiden.address
def get_channel(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
partner_address: Address,
) -> NettingChannelState:
if not is_binary_address(token_address):
raise InvalidAddress("Expected binary address format for token in get_channel")
if not is_binary_address(partner_address):
raise InvalidAddress("Expected binary address format for partner in get_channel")
channel_list = self.get_channel_list(registry_address, token_address, partner_address)
assert len(channel_list) <= 1
if not channel_list:
raise ChannelNotFound(
"Channel with partner '{}' for token '{}' could not be found.".format(
to_checksum_address(partner_address), to_checksum_address(token_address)
)
)
return channel_list[0]
def token_network_register(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
channel_participant_deposit_limit: TokenAmount,
token_network_deposit_limit: TokenAmount,
retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
) -> TokenNetworkAddress:
"""Register the `token_address` in the blockchain. If the address is already
registered but the event has not been processed this function will block
until the next block to make sure the event is processed.
Raises:
InvalidAddress: If the registry_address or token_address is not a valid address.
AlreadyRegisteredTokenAddress: If the token is already registered.
TransactionThrew: If the register transaction failed, this may
happen because the account has not enough balance to pay for the
gas or this register call raced with another transaction and lost.
"""
if not is_binary_address(registry_address):
raise InvalidAddress("registry_address must be a valid address in binary")
if not is_binary_address(token_address):
raise InvalidAddress("token_address must be a valid address in binary")
if token_address in self.get_tokens_list(registry_address):
raise AlreadyRegisteredTokenAddress("Token already registered")
contracts_version = self.raiden.contract_manager.contracts_version
registry = self.raiden.chain.token_network_registry(registry_address)
try:
if contracts_version == DEVELOPMENT_CONTRACT_VERSION:
return registry.add_token_with_limits(
token_address=token_address,
channel_participant_deposit_limit=channel_participant_deposit_limit,
token_network_deposit_limit=token_network_deposit_limit,
)
else:
return registry.add_token_without_limits(token_address=token_address)
except RaidenRecoverableError as e:
if "Token already registered" in str(e):
raise AlreadyRegisteredTokenAddress("Token already registered")
# else
raise
finally:
# Assume the transaction failed because the token is already
# registered with the smart contract and this node has not yet
# polled for the event (otherwise the check above would have
# failed).
#
# To provide a consistent view to the user, wait one block, this
# will guarantee that the events have been processed.
next_block = self.raiden.get_block_number() + 1
waiting.wait_for_block(self.raiden, next_block, retry_timeout)
def token_network_connect(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
funds: TokenAmount,
initial_channel_target: int = 3,
joinable_funds_target: float = 0.4,
) -> None:
""" Automatically maintain channels open for the given token network.
Args:
token_address: the ERC20 token network to connect to.
            funds: the amount of funds that can be used by the ConnectionManager.
initial_channel_target: number of channels to open proactively.
joinable_funds_target: fraction of the funds that will be used to join
channels opened by other participants.
"""
if not is_binary_address(registry_address):
raise InvalidAddress("registry_address must be a valid address in binary")
if not is_binary_address(token_address):
raise InvalidAddress("token_address must be a valid address in binary")
token_network_identifier = views.get_token_network_identifier_by_token_address(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=registry_address,
token_address=token_address,
)
connection_manager = self.raiden.connection_manager_for_token_network(
token_network_identifier
)
has_enough_reserve, estimated_required_reserve = has_enough_gas_reserve(
raiden=self.raiden, channels_to_open=initial_channel_target
)
if not has_enough_reserve:
raise InsufficientGasReserve(
(
"The account balance is below the estimated amount necessary to "
"finish the lifecycles of all active channels. A balance of at "
f"least {estimated_required_reserve} wei is required."
)
)
connection_manager.connect(
funds=funds,
initial_channel_target=initial_channel_target,
joinable_funds_target=joinable_funds_target,
)
def token_network_leave(
self, registry_address: PaymentNetworkID, token_address: TokenAddress
) -> List[NettingChannelState]:
""" Close all channels and wait for settlement. """
if not is_binary_address(registry_address):
raise InvalidAddress("registry_address must be a valid address in binary")
if not is_binary_address(token_address):
raise InvalidAddress("token_address must be a valid address in binary")
if token_address not in self.get_tokens_list(registry_address):
raise UnknownTokenAddress("token_address unknown")
token_network_identifier = views.get_token_network_identifier_by_token_address(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=registry_address,
token_address=token_address,
)
connection_manager = self.raiden.connection_manager_for_token_network(
token_network_identifier
)
return connection_manager.leave(registry_address)
def channel_open(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
partner_address: Address,
settle_timeout: BlockTimeout = None,
retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
) -> ChannelID:
""" Open a channel with the peer at `partner_address`
with the given `token_address`.
"""
if settle_timeout is None:
settle_timeout = self.raiden.config["settle_timeout"]
if settle_timeout < self.raiden.config["reveal_timeout"] * 2:
raise InvalidSettleTimeout(
"settle_timeout can not be smaller than double the reveal_timeout"
)
if not is_binary_address(registry_address):
raise InvalidAddress("Expected binary address format for registry in channel open")
if not is_binary_address(token_address):
raise InvalidAddress("Expected binary address format for token in channel open")
if not is_binary_address(partner_address):
raise InvalidAddress("Expected binary address format for partner in channel open")
chain_state = views.state_from_raiden(self.raiden)
channel_state = views.get_channelstate_for(
chain_state=chain_state,
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
)
if channel_state:
raise DuplicatedChannelError("Channel with given partner address already exists")
registry = self.raiden.chain.token_network_registry(registry_address)
token_network_address = registry.get_token_network(token_address)
if token_network_address is None:
raise TokenNotRegistered(
"Token network for token %s does not exist" % to_checksum_address(token_address)
)
        token_network = self.raiden.chain.token_network(token_network_address)
with self.raiden.gas_reserve_lock:
has_enough_reserve, estimated_required_reserve = has_enough_gas_reserve(
self.raiden, channels_to_open=1
)
if not has_enough_reserve:
raise InsufficientGasReserve(
(
"The account balance is below the estimated amount necessary to "
"finish the lifecycles of all active channels. A balance of at "
f"least {estimated_required_reserve} wei is required."
)
)
try:
token_network.new_netting_channel(
partner=partner_address,
settle_timeout=settle_timeout,
given_block_identifier=views.state_from_raiden(self.raiden).block_hash,
)
except DuplicatedChannelError:
log.info("partner opened channel first")
waiting.wait_for_newchannel(
raiden=self.raiden,
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
retry_timeout=retry_timeout,
)
chain_state = views.state_from_raiden(self.raiden)
channel_state = views.get_channelstate_for(
chain_state=chain_state,
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
)
        assert channel_state, "newly opened channel is missing from the local state"
return channel_state.identifier
def set_total_channel_deposit(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
partner_address: Address,
total_deposit: TokenAmount,
retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
):
""" Set the `total_deposit` in the channel with the peer at `partner_address` and the
given `token_address` in order to be able to do transfers.
Raises:
InvalidAddress: If either token_address or partner_address is not
20 bytes long.
TransactionThrew: May happen for multiple reasons:
                - If the token approval fails, e.g. the token may validate
                whether the account has enough balance for the allowance.
- The deposit failed, e.g. the allowance did not set the token
aside for use and the user spent it before deposit was called.
- The channel was closed/settled between the allowance call and
the deposit call.
AddressWithoutCode: The channel was settled during the deposit
execution.
DepositOverLimit: The total deposit amount is higher than the limit.
"""
chain_state = views.state_from_raiden(self.raiden)
token_addresses = views.get_token_identifiers(chain_state, registry_address)
channel_state = views.get_channelstate_for(
chain_state=chain_state,
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
)
if not is_binary_address(token_address):
raise InvalidAddress("Expected binary address format for token in channel deposit")
if not is_binary_address(partner_address):
raise InvalidAddress("Expected binary address format for partner in channel deposit")
if token_address not in token_addresses:
raise UnknownTokenAddress("Unknown token address")
if channel_state is None:
raise InvalidAddress("No channel with partner_address for the given token")
if self.raiden.config["environment_type"] == Environment.PRODUCTION:
per_token_network_deposit_limit = RED_EYES_PER_TOKEN_NETWORK_LIMIT
else:
per_token_network_deposit_limit = UINT256_MAX
token = self.raiden.chain.token(token_address)
token_network_registry = self.raiden.chain.token_network_registry(registry_address)
token_network_address = token_network_registry.get_token_network(token_address)
token_network_proxy = self.raiden.chain.token_network(token_network_address)
channel_proxy = self.raiden.chain.payment_channel(
canonical_identifier=channel_state.canonical_identifier
)
if total_deposit == 0:
raise DepositMismatch("Attempted to deposit with total deposit being 0")
addendum = total_deposit - channel_state.our_state.contract_balance
# Deposited tokens are held by the token network contract, so its balance
# reflects the network-wide total deposit.
total_network_balance = token.balance_of(token_network_address)
if total_network_balance + addendum > per_token_network_deposit_limit:
raise DepositOverLimit(
f"The deposit of {addendum} will exceed the "
f"token network limit of {per_token_network_deposit_limit}"
)
balance = token.balance_of(self.raiden.address)
functions = token_network_proxy.proxy.contract.functions
deposit_limit = functions.channel_participant_deposit_limit().call()
if total_deposit > deposit_limit:
raise DepositOverLimit(
f"The additional deposit of {addendum} will exceed the "
f"channel participant limit of {deposit_limit}"
)
# If this check succeeds it does not imply that the `deposit` will
# succeed, since the `deposit` transaction may race with another
# transaction.
if balance < addendum:
msg = "Not enough balance to deposit. {} Available={} Needed={}".format(
pex(token_address), balance, addendum
)
raise InsufficientFunds(msg)
# set_total_deposit calls approve
# token.approve(netcontract_address, addendum)
channel_proxy.set_total_deposit(
total_deposit=total_deposit,
block_identifier=views.state_from_raiden(self.raiden).block_hash,
)
target_address = self.raiden.address
waiting.wait_for_participant_newbalance(
raiden=self.raiden,
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
target_address=target_address,
target_balance=total_deposit,
retry_timeout=retry_timeout,
)
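# Worked example of the deposit arithmetic above (hypothetical numbers):
# with our_state.contract_balance == 5 and total_deposit == 8, the addendum
# actually transferred on-chain is 8 - 5 = 3; the network-wide limit, the
# per-participant limit, and the on-chain token balance are all checked
# against these values before any transaction is sent.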
def | (
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
partner_address: Address,
retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
):
"""Close a channel opened with `partner_address` for the given
`token_address`.
Race condition: this can fail if the channel was closed externally.
"""
self.channel_batch_close(
registry_address=registry_address,
token_address=token_address,
partner_addresses=[partner_address],
retry_timeout=retry_timeout,
)
def channel_batch_close(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
partner_addresses: List[Address],
retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
):
"""Close a channel opened with `partner_address` for the given
`token_address`.
Race condition, this can fail if channel was closed externally.
"""
if not is_binary_address(token_address):
raise InvalidAddress("Expected binary address format for token in channel close")
if not all(map(is_binary_address, partner_addresses)):
raise InvalidAddress("Expected binary address format for partner in channel close")
valid_tokens = views.get_token_identifiers(
chain_state=views.state_from_raiden(self.raiden), payment_network_id=registry_address
)
if token_address not in valid_tokens:
raise UnknownTokenAddress("Token address is not known.")
chain_state = views.state_from_raiden(self.raiden)
channels_to_close = views.filter_channels_by_partneraddress(
chain_state=chain_state,
payment_network_id=registry_address,
token_address=token_address,
partner_addresses=partner_addresses,
)
greenlets: Set[Greenlet] = set()
for channel_state in channels_to_close:
channel_close = ActionChannelClose(
canonical_identifier=channel_state.canonical_identifier
)
greenlets.update(self.raiden.handle_state_change(channel_close))
gevent.joinall(greenlets, raise_error=True)
channel_ids = [channel_state.identifier for channel_state in channels_to_close]
waiting.wait_for_close(
raiden=self.raiden,
payment_network_id=registry_address,
token_address=token_address,
channel_ids=channel_ids,
retry_timeout=retry_timeout,
)
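# A minimal sketch of closing several channels at once (assumes `api` is an
# instance of this class and `partners` is a list of binary addresses):
#
#     api.channel_batch_close(
#         registry_address=registry_address,
#         token_address=token_address,
#         partner_addresses=partners,
#     )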
def get_channel_list(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress = None,
partner_address: Address = None,
) -> List[NettingChannelState]:
"""Returns a list of channels associated with the optionally given
`token_address` and/or `partner_address`.
Args:
token_address: an optionally provided token address
partner_address: an optionally provided partner address
Return:
A list containing all channels the node participates in, optionally
filtered by a token address and/or partner address.
Raises:
KeyError: Raised when the token address is unknown to the node.
"""
if registry_address and not is_binary_address(registry_address):
raise InvalidAddress("Expected binary address format for registry in get_channel_list")
if token_address and not is_binary_address(token_address):
raise InvalidAddress("Expected binary address format for token in get_channel_list")
if partner_address:
if not is_binary_address(partner_address):
raise InvalidAddress(
"Expected binary address format for partner in get_channel_list"
)
if not token_address:
raise UnknownTokenAddress("Provided a partner address but no token address")
if token_address and partner_address:
channel_state = views.get_channelstate_for(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=registry_address,
token_address=token_address,
partner_address=partner_address,
)
if channel_state:
result = [channel_state]
else:
result = []
elif token_address:
result = views.list_channelstate_for_tokennetwork(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=registry_address,
token_address=token_address,
)
else:
result = views.list_all_channelstate(chain_state=views.state_from_raiden(self.raiden))
return result
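# Filtering semantics of get_channel_list, illustrated (hypothetical calls):
#
#     api.get_channel_list(registry)                   # every channel
#     api.get_channel_list(registry, token)            # one token network
#     api.get_channel_list(registry, token, partner)   # at most one channel
#
# A partner address without a token address raises UnknownTokenAddress,
# as checked above.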
def get_node_network_state(self, node_address: Address):
""" Returns the currently network status of `node_address`. """
return views.get_node_network_status(
chain_state=views.state_from_raiden(self.raiden), node_address=node_address
)
def start_health_check_for(self, node_address: Address):
""" Returns the currently network status of `node_address`. """
self.raiden.start_health_check_for(node_address)
def get_tokens_list(self, registry_address: PaymentNetworkID):
"""Returns a list of tokens the node knows about"""
tokens_list = views.get_token_identifiers(
chain_state=views.state_from_raiden(self.raiden), payment_network_id=registry_address
)
return tokens_list
def get_token_network_address_for_token_address(
self, registry_address: PaymentNetworkID, token_address: TokenAddress
) -> Optional[TokenNetworkID]:
return views.get_token_network_identifier_by_token_address(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=registry_address,
token_address=token_address,
)
def transfer_and_wait(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
amount: TokenAmount,
target: Address,
identifier: PaymentID = None,
transfer_timeout: int = None,
secret: Secret = None,
secrethash: SecretHash = None,
):
""" Do a transfer with `target` with the given `amount` of `token_address`. """
# pylint: disable=too-many-arguments
payment_status = self.transfer_async(
registry_address=registry_address,
token_address=token_address,
amount=amount,
target=target,
identifier=identifier,
secret=secret,
secrethash=secrethash,
)
payment_status.payment_done.wait(timeout=transfer_timeout)
return payment_status
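# Sketch of the blocking wrapper (hypothetical values): the call below
# returns once the payment finishes or after 30 seconds, whichever comes
# first; the returned status object can then be inspected:
#
#     status = api.transfer_and_wait(
#         registry_address=registry, token_address=token,
#         amount=10, target=target, transfer_timeout=30,
#     )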
def transfer_async(
self,
registry_address: PaymentNetworkID,
token_address: TokenAddress,
amount: TokenAmount,
target: Address,
identifier: PaymentID = None,
secret: Secret = None,
secrethash: SecretHash = None,
):
if not isinstance(amount, int):
raise InvalidAmount("Amount is not an integer")
if amount <= 0:
raise InvalidAmount("Amount must be positive")
if not is_binary_address(token_address):
raise InvalidAddress("token address is not valid.")
if not is_binary_address(target):
raise InvalidAddress("target address is not valid.")
if secret is not None:
if len(secret) != SECRET_HEXSTRING_LENGTH:
raise InvalidSecretOrSecretHash(
"secret length should be " + str(SECRET_HEXSTRING_LENGTH) + "."
)
if not is_hex(secret):
raise InvalidSecretOrSecretHash("provided secret is not an hexadecimal string.")
secret = to_bytes(hexstr=secret)
if secrethash is not None:
if len(secrethash) != SECRETHASH_HEXSTRING_LENGTH:
raise InvalidSecretOrSecretHash(
"secret_hash length should be " + str(SECRETHASH_HEXSTRING_LENGTH) + "."
)
if not is_hex(secrethash):
raise InvalidSecretOrSecretHash("secret_hash is not an hexadecimal string.")
secrethash = to_bytes(hexstr=secrethash)
# if both secret and secrethash were provided we check that sha3(secret)
# matches the secrethash. Note that it is valid to provide a secrethash
# without providing a secret
if secret is not None and secrethash is not None and secrethash != sha3(secret):
raise InvalidSecretOrSecretHash("provided secret and secret_hash do not match.")
valid_tokens = views.get_token_identifiers(
views.state_from_raiden(self.raiden), registry_address
)
if token_address not in valid_tokens:
raise UnknownTokenAddress("Token address is not known.")
log.debug(
"Initiating transfer",
initiator=pex(self.raiden.address),
target=pex(target),
token=pex(token_address),
amount=amount,
identifier=identifier,
)
payment_network_identifier = self.raiden.default_registry.address
token_network_identifier = views.get_token_network_identifier_by_token_address(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=payment_network_identifier,
token_address=token_address,
)
payment_status = self.raiden.mediated_transfer_async(
token_network_identifier=token_network_identifier,
amount=amount,
target=target,
identifier=identifier,
secret=secret,
secrethash=secrethash,
)
return payment_status
def get_raiden_events_payment_history_with_timestamps(
self,
token_address: TokenAddress = None,
target_address: Address = None,
limit: int = None,
offset: int = None,
):
if token_address and not is_binary_address(token_address):
raise InvalidAddress(
"Expected binary address format for token in get_raiden_events_payment_history"
)
if target_address and not is_binary_address(target_address):
raise InvalidAddress(
"Expected binary address format for "
"target_address in get_raiden_events_payment_history"
)
token_network_identifier = None
if token_address:
token_network_identifier = views.get_token_network_identifier_by_token_address(
chain_state=views.state_from_raiden(self.raiden),
payment_network_id=self.raiden.default_registry.address,
token_address=token_address,
)
events = [
event
for event in self.raiden.wal.storage.get_events_with_timestamps(
limit=limit, offset=offset
)
if event_filter_for_payments(
event=event.wrapped_event,
token_network_identifier=token_network_identifier,
partner_address=target_address,
)
]
return events
def get_raiden_events_payment_history(
self,
token_address: TokenAddress = None,
target_address: Address = None,
limit: int = None,
offset: int = None,
):
timestamped_events = self.get_raiden_events_payment_history_with_timestamps(
token_address=token_address, target_address=target_address, limit=limit, offset=offset
)
return [event.wrapped_event for event in timestamped_events]
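# Sketch (hypothetical): the last 10 payment events for a single token:
#
#     events = api.get_raiden_events_payment_history(
#         token_address=token, limit=10, offset=0,
#     )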
def get_raiden_internal_events_with_timestamps(self, limit: int = None, offset: int = None):
return self.raiden.wal.storage.get_events_with_timestamps(limit=limit, offset=offset)
transfer = transfer_and_wait
def get_blockchain_events_network(
self,
registry_address: PaymentNetworkID,
from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
to_block: BlockSpecification = "latest",
):
events = blockchain_events.get_token_network_registry_events(
chain=self.raiden.chain,
token_network_registry_address=registry_address,
contract_manager=self.raiden.contract_manager,
events=blockchain_events.ALL_EVENTS,
from_block=from_block,
to_block=to_block,
)
return sorted(events, key=lambda evt: evt.get("block_number"), reverse=True)
def get_blockchain_events_token_network(
self,
token_address: TokenAddress,
from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
to_block: BlockSpecification = "latest",
):
"""Returns a list of blockchain events coresponding to the token_address."""
if not is_binary_address(token_address):
raise InvalidAddress(
"Expected binary address format for token in get_blockchain_events_token_network"
)
token_network_address = self.raiden.default_registry.get_token_network(token_address)
if token_network_address is None:
raise UnknownTokenAddress("Token address is not known.")
returned_events = blockchain_events.get_token_network_events(
chain=self.raiden.chain,
token_network_address=token_network_address,
contract_manager=self.raiden.contract_manager,
events=blockchain_events.ALL_EVENTS,
from_block=from_block,
to_block=to_block,
)
for event in returned_events:
if event.get("args"):
event["args"] = dict(event["args"])
returned_events.sort(key=lambda evt: evt.get("block_number"), reverse=True)
return returned_events
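# Sketch (hypothetical): all events of one token network since genesis;
# the result is sorted newest-first by block number:
#
#     events = api.get_blockchain_events_token_network(token_address=token)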
def get_blockchain_events_channel(
self,
token_address: TokenAddress,
partner_address: Address = None,
from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
to_block: BlockSpecification = "latest",
):
if not is_binary_address(token_address):
raise InvalidAddress(
"Expected binary address format for token in get_blockchain_events_channel"
)
token_network_address = self.raiden.default_registry.get_token_network(token_address)
if token_network_address is None:
raise UnknownTokenAddress("Token address is not known.")
channel_list = self.get_channel_list(
registry_address=self.raiden.default_registry.address,
token_address=token_address,
partner_address=partner_address,
)
returned_events = []
for channel in channel_list:
returned_events.extend(
blockchain_events.get_all_netting_channel_events(
chain=self.raiden.chain,
token_network_address=token_network_address,
netting_channel_identifier=channel.identifier,
contract_manager=self.raiden.contract_manager,
from_block=from_block,
to_block=to_block,
)
)
returned_events.sort(key=lambda evt: evt.get("block_number"), reverse=True)
return returned_events
def create_monitoring_request(
self, balance_proof: BalanceProofSignedState, reward_amount: TokenAmount
) -> Optional[RequestMonitoring]:
""" This method can be used to create a `RequestMonitoring` message.
It will contain all data necessary for an external monitoring service to
- send an updateNonClosingBalanceProof transaction to the TokenNetwork contract,
for the `balance_proof` that we received from a channel partner.
- claim the `reward_amount` from the UDC.
"""
# create RequestMonitoring message from the above + `reward_amount`
monitor_request = RequestMonitoring.from_balance_proof_signed_state(
balance_proof=balance_proof, reward_amount=reward_amount
)
# sign RequestMonitoring and return
monitor_request.sign(self.raiden.signer)
return monitor_request
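# Sketch (hypothetical): build a signed monitoring request offering a
# reward of 10 tokens to the service that updates our balance proof:
#
#     request = api.create_monitoring_request(
#         balance_proof=balance_proof, reward_amount=10,
#     )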
def get_pending_transfers(
self, token_address: TokenAddress = None, partner_address: Address = None
) -> List[Dict[str, Any]]:
chain_state = views.state_from_raiden(self.raiden)
transfer_tasks = views.get_all_transfer_tasks(chain_state)
channel_id = None
if token_address is not None:
if self.raiden.default_registry.get_token_network(token_address) is None:
raise UnknownTokenAddress(f"Token {token_address} not found.")
if partner_address is not None:
partner_channel = self.get_channel(
registry_address=self.raiden.default_registry.address,
token_address=token_address,
partner_address=partner_address,
)
channel_id = partner_channel.identifier
return transfer_tasks_view(transfer_tasks, token_address, channel_id)
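# Sketch (hypothetical): pending transfers restricted to one channel:
#
#     pending = api.get_pending_transfers(
#         token_address=token, partner_address=partner,
#     )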
| channel_close |
asyncssh-server.py | #!/usr/bin/env python
"""
Example of running a prompt_toolkit application in an asyncssh server.
"""
import asyncio
import logging
import asyncssh
from pygments.lexers.html import HtmlLexer
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.contrib.ssh import PromptToolkitSSHServer, PromptToolkitSSHSession
from prompt_toolkit.lexers import PygmentsLexer
from prompt_toolkit.shortcuts import ProgressBar, print_formatted_text
from prompt_toolkit.shortcuts.dialogs import input_dialog, yes_no_dialog
from prompt_toolkit.shortcuts.prompt import PromptSession
animal_completer = WordCompleter(
[
"alligator",
"ant",
"ape",
"bat",
"bear",
"beaver",
"bee",
"bison",
"butterfly",
"cat",
"chicken",
"crocodile",
"dinosaur",
"dog",
"dolphin",
"dove",
"duck",
"eagle",
"elephant",
"fish",
"goat",
"gorilla",
"kangaroo",
"leopard",
"lion",
"mouse",
"rabbit",
"rat",
"snake",
"spider",
"turkey",
"turtle",
],
ignore_case=True,
)
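# WordCompleter suggests entries from the list above that match the word
# before the cursor; ignore_case=True makes that matching case-insensitive.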
async def interact(ssh_session: PromptToolkitSSHSession) -> None:
"""
The application interaction.
This will run automatically in a prompt_toolkit AppSession, which means
that any prompt_toolkit application (dialogs, prompts, etc...) will use the
SSH channel for input and output.
"""
prompt_session = PromptSession()
# Alias 'print_formatted_text', so that 'print' calls go to the SSH client.
print = print_formatted_text
print("We will be running a few prompt_toolkit applications through this ")
print("SSH connection.\n")
# Simple progress bar.
with ProgressBar() as pb:
for i in pb(range(50)):
await asyncio.sleep(0.1)
# Normal prompt.
text = await prompt_session.prompt_async("(normal prompt) Type something: ")
print("You typed", text)
# Prompt with auto completion.
text = await prompt_session.prompt_async(
"(autocompletion) Type an animal: ", completer=animal_completer
)
print("You typed", text)
# Prompt with syntax highlighting.
text = await prompt_session.prompt_async(
"(HTML syntax highlighting) Type something: ", lexer=PygmentsLexer(HtmlLexer)
)
print("You typed", text)
# Show yes/no dialog.
await prompt_session.prompt_async("Showing yes/no dialog... [ENTER]")
await yes_no_dialog("Yes/no dialog", "Running over asyncssh").run_async()
# Show input dialog.
await prompt_session.prompt_async("Showing input dialog... [ENTER]")
await input_dialog("Input dialog", "Running over asyncssh").run_async()
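# Once the server below is running, the example can be tried from another
# terminal with a plain SSH client (default port 8222; the host key path
# used below is assumed to exist):
#
#     ssh localhost -p 8222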
def main(port=8222):
# Set up logging.
|
if __name__ == "__main__":
main()
| logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(
asyncssh.create_server(
lambda: PromptToolkitSSHServer(interact),
"",
port,
server_host_keys=["/etc/ssh/ssh_host_ecdsa_key"],
)
)
loop.run_forever() |
APIDef.py | {
###################################
# User data should be added below # | # e.g. "aidPage" is a default function in W3 and does not need to be specified here #
#"aidPage": {
# W3Const.w3ElementType: W3Const.w3TypeApi,
# W3Const.w3ApiName: "page",
# W3Const.w3ApiParams: [
# {
# W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
# W3Const.w3ApiDataValue: "id"
# }]
#}
###################################
# User data should be added above #
###################################
} | ###################################
|