file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---|
component.ts | // deno-lint-ignore no-explicit-any
type Class<S> = new (...args: any[]) => S;
const registry: { [name: string]: Class<BaseComponent> } = {};
const registeredElements: HTMLElement[] = [];
export function Component(name: string) {
return (ctor: Class<BaseComponent>) => {
registry[name] = ctor;
};
}
export abstract class BaseComponent {
static readonly _name: string;
protected readonly ctx: HTMLElement;
protected constructor(ctx: HTMLElement) {
this.ctx = ctx;
} |
declare global {
interface HTMLElement {
component: BaseComponent;
}
}
export function bootComponents() {
function initializeComponents() {
document.querySelectorAll<HTMLElement>(`[data-cmp]`).forEach((ctx) => {
// do not initialize more than once
if (!registeredElements.includes(ctx)) {
const cmp = registry[ctx.dataset.cmp || ""];
if (!cmp) {
throw new Error(`No component with the name ${ctx.dataset.cmp} found.`);
}
registeredElements.push(ctx);
ctx.component = new cmp(ctx);
}
});
}
if (document.readyState === "loading") {
document.addEventListener("DOMContentLoaded", initializeComponents, false);
} else {
initializeComponents();
}
}
export function destroyComponents() {
while (registeredElements.length > 0) {
registeredElements.pop()?.component.destructor();
}
} |
destructor(): void {
}
} |
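Editorial note: the component.ts row above defines a decorator-driven component registry: `@Component("name")` maps a string to a class, and `bootComponents` instantiates the registered class for every element carrying `data-cmp`. Below is a minimal Python sketch of the same registry-decorator pattern; all names in it (`registry`, `component`, `BaseComponent`, `Dropdown`) are illustrative, not from the row.

```python
# Minimal sketch of the data-cmp registry pattern, in Python for illustration.
registry = {}

def component(name):
    """Register a BaseComponent subclass under a string name."""
    def decorator(cls):
        registry[name] = cls
        return cls
    return decorator

class BaseComponent:
    def __init__(self, ctx):
        self.ctx = ctx  # the element/context the component is bound to

    def destructor(self):
        pass  # subclasses release listeners/resources here

@component("dropdown")
class Dropdown(BaseComponent):
    pass

# Lookup mirrors `registry[ctx.dataset.cmp]` in the TypeScript version.
cmp = registry["dropdown"]("fake-element")
assert isinstance(cmp, Dropdown)
```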
test_userid.py | import pytest
import time
from tests.live import testlib
from pandevice import base
class TestUserID_FW(object):
"""Tests UserID on live Firewall."""
def test_01_fw_login(self, fw, state_map):
state = state_map.setdefault(fw)
user, ip = testlib.random_name(), testlib.random_ip()
fw.userid.login(user, ip)
state.single_user = [user, ip]
def test_02_fw_logins(self, fw, state_map):
state = state_map.setdefault(fw)
users = [(testlib.random_name(), testlib.random_ip()) for i in range(10)]
fw.userid.logins(users)
state.multi_user = users
def test_03_fw_logout(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.single_user:
raise Exception("User not logged in yet")
user, ip = state.single_user |
def test_04_fw_logouts(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.multi_user:
raise Exception("User not logged in yet")
fw.userid.logouts(state.multi_user)
def test_05_register_str(self, fw, state_map):
state = state_map.setdefault(fw)
ip, tag = testlib.random_ip(), testlib.random_name()
fw.userid.register(ip, tag)
state.single_register = [ip, tag]
def test_06_unregister_str(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.single_register:
raise Exception("No single_register")
ip, tag = state.single_register
fw.userid.unregister(ip, tag)
def test_07_register_lst(self, fw, state_map):
state = state_map.setdefault(fw)
ips = [testlib.random_ip() for x in range(10)]
tags = [testlib.random_name() for i in range(15)]
fw.userid.register(ips, tags)
state.multi_register_01 = [ips, tags]
def test_08_get_registered_ip(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.multi_register_01:
raise Exception("Multi register not set")
ips, tags = state.multi_register_01
test1 = set(fw.userid.get_registered_ip())
assert test1 == set(ips)
test2 = set(fw.userid.get_registered_ip(ips[0:3], tags))
assert test2 == set(ips[0:3])
test3 = set(fw.userid.get_registered_ip(ips[0:3], tags[0:5]))
assert test3 == set(ips[0:3])
test4 = set(fw.userid.get_registered_ip(ips, tags[0:5]))
assert test4 == set(ips)
test5 = set(fw.userid.get_registered_ip(ips[0], tags[0]))
assert test5 == set([ips[0],])
tests = [test1, test2, test3, test4, test5]
assert len(test5) != 0
assert all([test1 >= x for x in tests])
assert all([x >= test5 for x in tests])
assert test2 >= test3
assert test4 >= test3
def test_09_audit_registered_ip(self, fw, state_map):
state = state_map.setdefault(fw)
original = set(fw.userid.get_registered_ip())
new_ips = [testlib.random_ip() for x in range(5)]
new_tags = [testlib.random_name() for i in range(8)]
ip_tags_pairs = dict([(ip, tuple(new_tags)) for ip in new_ips])
fw.userid.audit_registered_ip(ip_tags_pairs)
state.multi_register_02 = [new_ips, new_tags]
new_set = set(fw.userid.get_registered_ip())
assert len(new_set) < len(original)
assert new_set == set(new_ips)
def test_10_clear_registered_ip(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.multi_register_02:
raise Exception("Multi register not set")
ips, tags = state.multi_register_02
original = list(fw.userid.get_registered_ip())
fw.userid.clear_registered_ip(ips[0], tags[0])
mod1 = list(fw.userid.get_registered_ip())
fw.userid.clear_registered_ip(ips[0:4], tags[0:5])
mod2 = list(fw.userid.get_registered_ip())
fw.userid.clear_registered_ip(ips[0:4], tags)
mod3 = list(fw.userid.get_registered_ip())
fw.userid.clear_registered_ip(ips, tags[0:7])
mod4 = list(fw.userid.get_registered_ip())
fw.userid.clear_registered_ip()
mod5 = list(fw.userid.get_registered_ip())
assert len(mod3) < len(mod2)
assert len(mod3) < len(mod1)
assert len(mod3) < len(original)
assert len(mod5) == 0
def test_11_batch(self, fw, state_map):
fw.userid.clear_registered_ip() # Fresh start
fw.userid.batch_start()
users = [(testlib.random_name(), testlib.random_ip()) for i in range(5)]
fw.userid.logins(users)
ips = [testlib.random_ip() for x in range(5)]
tags = [testlib.random_name() for y in range(5)]
fw.userid.register(ips, tags)
fw.userid.unregister(ips[2], tags[4])
fw.userid.get_registered_ip(ips[0:3], tags[2:4])
new_ips = [testlib.random_ip() for x in range(3)]
new_tags = [testlib.random_name() for y in range(3)]
fw.userid.audit_registered_ip(dict([(ip, tuple(new_tags)) for ip in new_ips]))
fw.userid.get_registered_ip()
fw.userid.unregister(new_ips, new_tags)
fw.userid.batch_end()
def test_12_uidmessage(self, fw, state_map):
state = state_map.setdefault(fw)
state.uid = fw.userid._create_uidmessage()
def test_13_send(self, fw, state_map):
state = state_map.setdefault(fw)
if not state.uid:
raise Exception("No UID")
fw.userid.send(
state.uid[0]
) # State.uid returns length-two tuple of XML elements | fw.userid.logout(user, ip) |
files.go | package filehandler
import (
"bytes"
"html/template"
"os"
"path/filepath"
)
func check(e error) {
if e != nil {
panic(e)
}
}
// CreateDirIfNotExist creates the directory (and any missing parents) if it does not already exist.
func CreateDirIfNotExist(dir string) {
if _, err := os.Stat(dir); os.IsNotExist(err) {
err = os.MkdirAll(dir, 0755)
check(err)
}
}
// GetAllFilePathsInDirectory recursively returns the paths of all .tmpl files in a specified directory, including sub-directories.
func | (dirpath string) ([]string, error) {
// Get all the .tmpl files in the directory.
var paths []string
extension := ".tmpl"
err := filepath.Walk(dirpath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && filepath.Ext(path) == extension {
paths = append(paths, path)
}
return nil
})
if err != nil {
return nil, err
}
return paths, nil
}
// process applies the data structure 'vars' onto an already
// parsed template 't', and returns the resulting string.
func process(t *template.Template, vars interface{}) string {
var tmplBytes bytes.Buffer
err := t.Execute(&tmplBytes, vars)
check(err)
return tmplBytes.String()
}
// ProcessFile parses the named template file, applies 'vars' to it, and returns the resulting string.
func ProcessFile(fileName string, vars interface{}) string {
tmpl, err := template.ParseFiles(fileName)
check(err)
return process(tmpl, vars)
}
// WriteToFile writes the given data to the named file, creating the file if necessary.
func WriteToFile(filename string, data string) {
println("Writing file: " + filename)
file, err := os.Create(filename)
check(err)
defer file.Close()
file.WriteString(data)
}
| GetAllFilePathsInDirectory |
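Editorial note: this row's hole is just the function name (`GetAllFilePathsInDirectory`); the body walks a directory tree with `filepath.Walk` and keeps `.tmpl` paths. A rough Python equivalent of that walk for comparison; the function name here is mine, not from the row.

```python
import os

def get_all_template_paths(dirpath, extension=".tmpl"):
    """Recursively collect paths of files with the given extension,
    mirroring the filepath.Walk loop in files.go."""
    paths = []
    for root, _dirs, files in os.walk(dirpath):
        for name in files:
            if os.path.splitext(name)[1] == extension:
                paths.append(os.path.join(root, name))
    return paths
```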
transform_group_by_partial.rs | // Copyright 2020-2021 The Datafuse Authors.
//
// SPDX-License-Identifier: Apache-2.0.
use std::any::Any;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Instant;
use bumpalo::Bump;
use common_datablocks::DataBlock;
use common_datablocks::HashMethod;
use common_datablocks::HashMethodKind;
use common_datavalues::arrays::BinaryArrayBuilder;
use common_datavalues::prelude::*;
use common_exception::Result;
use common_infallible::RwLock;
use common_planners::Expression;
use common_streams::DataBlockStream;
use common_streams::SendableDataBlockStream;
use common_tracing::tracing;
use futures::stream::StreamExt;
use crate::pipelines::processors::EmptyProcessor;
use crate::pipelines::processors::Processor;
pub struct GroupByPartialTransform {
aggr_exprs: Vec<Expression>,
group_exprs: Vec<Expression>,
schema: DataSchemaRef,
schema_before_group_by: DataSchemaRef,
input: Arc<dyn Processor>,
}
impl GroupByPartialTransform {
pub fn create(
schema: DataSchemaRef,
schema_before_group_by: DataSchemaRef,
aggr_exprs: Vec<Expression>,
group_exprs: Vec<Expression>,
) -> Self {
Self {
aggr_exprs,
group_exprs,
schema,
schema_before_group_by,
input: Arc::new(EmptyProcessor::create()),
}
}
}
#[async_trait::async_trait]
impl Processor for GroupByPartialTransform {
fn name(&self) -> &str {
"GroupByPartialTransform"
}
fn connect_to(&mut self, input: Arc<dyn Processor>) -> Result<()> {
self.input = input;
Ok(())
}
fn inputs(&self) -> Vec<Arc<dyn Processor>> {
vec![self.input.clone()]
}
fn as_any(&self) -> &dyn Any {
self
}
/// Create hash group based on row index and apply the function with vector.
/// For example:
/// row_idx, A
/// 0, 1
/// 1, 2
/// 2, 3
/// 3, 4
/// 4, 5
///
/// grouping by [A%3]
/// 1.1)
/// row_idx, group_key, A
/// 0, 1, 1
/// 1, 2, 2
/// 2, 0, 3
/// 3, 1, 4
/// 4, 2, 5
///
/// 1.2) make indices group(for vector compute)
/// group_key, indices
/// 0, [2]
/// 1, [0, 3]
/// 2, [1, 4]
///
/// 1.3) apply aggregate function(SUM(A)) to the take block
/// group_key, SUM(A)
/// <0, 3>
/// <1, 1+4>
/// <2, 2+5>
async fn execute(&self) -> Result<SendableDataBlockStream> {
tracing::debug!("execute...");
let aggr_len = self.aggr_exprs.len();
let start = Instant::now();
let schema_before_group_by = self.schema_before_group_by.clone();
let mut funcs = Vec::with_capacity(self.aggr_exprs.len());
let mut arg_names = Vec::with_capacity(self.aggr_exprs.len());
let mut aggr_cols = Vec::with_capacity(self.aggr_exprs.len());
for expr in self.aggr_exprs.iter() {
funcs.push(expr.to_aggregate_function(&schema_before_group_by)?);
arg_names.push(expr.to_aggregate_function_names()?);
aggr_cols.push(expr.column_name());
}
let group_cols = self
.group_exprs
.iter()
.map(|x| x.column_name())
.collect::<Vec<_>>();
let mut stream = self.input.execute().await?;
let arena = Bump::new();
let sample_block = DataBlock::empty_with_schema(self.schema.clone());
let method = DataBlock::choose_hash_method(&sample_block, &group_cols)?;
macro_rules! apply {
($hash_method: ident, $key_array_builder: ty, $group_func_table: ty) => {{
// Table for <group_key, (place, keys) >
type GroupFuncTable = $group_func_table;
let groups_locker = GroupFuncTable::default();
while let Some(block) = stream.next().await {
let block = block?;
// 1.1 and 1.2.
let group_blocks = $hash_method.group_by(&block, &group_cols)?;
// 1.3 Apply take blocks to aggregate function by group_key.
{
for (group_key, group_keys, take_block) in group_blocks {
let rows = take_block.num_rows();
let mut groups = groups_locker.write();
match groups.get_mut(&group_key) {
// New group.
None => {
let mut places = Vec::with_capacity(aggr_cols.len());
for (idx, _aggr_col) in aggr_cols.iter().enumerate() {
let func = funcs[idx].clone();
let place = funcs[idx].allocate_state(&arena);
let arg_columns = arg_names[idx]
.iter()
.map(|arg| {
take_block
.try_column_by_name(arg)
.map(|c| c.clone())
})
.collect::<Result<Vec<DataColumn>>>()?;
func.accumulate(place, &arg_columns, rows)?;
places.push(place);
}
groups.insert(group_key.clone(), (places, group_keys));
}
// Accumulate result against the take block by indices.
Some((places, _)) => {
for (idx, _aggr_col) in aggr_cols.iter().enumerate() {
let arg_columns = arg_names[idx]
.iter()
.map(|arg| {
take_block
.try_column_by_name(arg)
.map(|c| c.clone())
})
.collect::<Result<Vec<DataColumn>>>()?;
funcs[idx].accumulate(places[idx], &arg_columns, rows)?
}
}
}
}
}
}
let delta = start.elapsed();
tracing::debug!("Group by partial cost: {:?}", delta);
let groups = groups_locker.read();
if groups.is_empty() {
return Ok(Box::pin(DataBlockStream::create(
DataSchemaRefExt::create(vec![]),
None,
vec![],
)));
}
let mut group_arrays = Vec::with_capacity(group_cols.len());
for _i in 0..group_cols.len() {
group_arrays.push(Vec::with_capacity(groups.len()));
}
// Builders.
let mut state_builders: Vec<BinaryArrayBuilder> = (0..aggr_len)
.map(|_| BinaryArrayBuilder::new(groups.len() * 4))
.collect();
type KeyBuilder = $key_array_builder;
let mut group_key_builder = KeyBuilder::new(groups.len());
for (key, (places, values)) in groups.iter() {
for (idx, func) in funcs.iter().enumerate() {
let mut writer = vec![];
func.serialize(places[idx], &mut writer)?;
state_builders[idx].append_value(&writer);
}
for (i, value) in values.iter().enumerate() {
group_arrays[i].push(value.clone());
}
// Keys
group_key_builder.append_value((*key).clone());
}
let mut columns: Vec<Series> = Vec::with_capacity(self.schema.fields().len());
for mut builder in state_builders {
columns.push(builder.finish().into_series());
}
for (i, values) in group_arrays.iter().enumerate() {
columns.push(DataValue::try_into_data_array(
values,
&self.group_exprs[i].to_data_type(&self.schema_before_group_by)?,
)?)
}
let array = group_key_builder.finish();
columns.push(array.into_series());
let block = DataBlock::create_by_array(self.schema.clone(), columns);
Ok(Box::pin(DataBlockStream::create(
self.schema.clone(),
None,
vec![block],
)))
}};
}
macro_rules! match_hash_method_and_apply {
($method: ident, $apply: ident) => {{
match $method {
HashMethodKind::Serializer(hash_method) => {
apply! { hash_method, BinaryArrayBuilder , RwLock<HashMap<Vec<u8>, (Vec<usize>, Vec<DataValue>), ahash::RandomState>>}
} | apply! { hash_method , DFUInt8ArrayBuilder, RwLock<HashMap<u8, (Vec<usize>, Vec<DataValue>), ahash::RandomState>> }
}
HashMethodKind::KeysU16(hash_method) => {
apply! { hash_method , DFUInt16ArrayBuilder, RwLock<HashMap<u16, (Vec<usize>, Vec<DataValue>), ahash::RandomState>> }
}
HashMethodKind::KeysU32(hash_method) => {
apply! { hash_method , DFUInt32ArrayBuilder, RwLock<HashMap<u32, (Vec<usize>, Vec<DataValue>), ahash::RandomState>> }
}
HashMethodKind::KeysU64(hash_method) => {
apply! { hash_method , DFUInt64ArrayBuilder, RwLock<HashMap<u64, (Vec<usize>, Vec<DataValue>), ahash::RandomState>> }
}
}
}};
}
match_hash_method_and_apply! {method, apply}
}
} | HashMethodKind::KeysU8(hash_method) => { |
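Editorial note: the doc comment on `execute` in this row walks through the partial group-by in three steps: compute a group key per row, bucket row indices by key, then run the aggregate over each bucket. The Python sketch below reproduces the comment's own worked example (grouping `A` by `A % 3`, aggregating `SUM(A)`); the numbers match the tables in the comment.

```python
from collections import defaultdict

A = [1, 2, 3, 4, 5]

# 1.1 compute a group key for every row (grouping by A % 3)
keys = [a % 3 for a in A]            # [1, 2, 0, 1, 2]

# 1.2 build the indices group: group_key -> row indices
groups = defaultdict(list)
for row_idx, key in enumerate(keys):
    groups[key].append(row_idx)      # {1: [0, 3], 2: [1, 4], 0: [2]}

# 1.3 apply the aggregate (SUM(A)) to each "take block"
sums = {key: sum(A[i] for i in idxs) for key, idxs in groups.items()}
assert sums == {0: 3, 1: 5, 2: 7}    # <0, 3>, <1, 1+4>, <2, 2+5>
```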
specs.py | # MIT License
# This project is a software package to automate the performance tracking of the HPC algorithms
# Copyright (c) 2021. Victor Tuah Kumi, Ahmed Iqbal, Javier Vite, Aidan Forester
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Gets specifications for GPU/CPU on which rocm is run"""
import model
def | (hardware_ids, rocm_versions):
"""Gets specification for all specified rocm hardwares"""
specs_info = []
for rocm in rocm_versions:
for hardw_id in hardware_ids:
specs = model.get_specs(hardw_id, rocm) #returns dictionary
if specs is not None:
title = f'{rocm} specs'
info = f'''
```
{title}
Host info:
hostname: {specs['hostname']}
cpu info: {specs['cpu_info']}
ram: {specs['ram']}
distro: {specs['distro']}
kernel version: {specs['kernel']}
rocm version: {specs['rocm']}
Device info:
device: {specs['device']}
vbios version: {specs['vbios']}
vram: {specs['vram']}
performance level: {specs['performance']}
system clock: {specs['sys_clock']}
memory clock: {specs['mem_clock']}
```
'''
specs_info.append(info)
return specs_info
| get_specs |
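Editorial note: `get_specs` (the row's hole) cross-joins ROCm versions with hardware ids and formats each hit from `model.get_specs` into a fenced info string. A hedged usage sketch follows; the ids and version strings are hypothetical, and `model` must be configured for them to resolve.

```python
import specs  # the module above; assumes model.get_specs is backed by real data

# Hypothetical inputs; real ids/versions depend on the deployment.
infos = specs.get_specs(["gpu-0", "gpu-1"], ["rocm-4.2", "rocm-4.3"])

# One fenced info string per (version, id) pair that model.get_specs resolves,
# so at most len(hardware_ids) * len(rocm_versions) entries come back.
for info in infos:
    print(info)
```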
extension.py | from waldur_core.core import WaldurExtension
class PlaybookJobsExtension(WaldurExtension):
class Settings: | WALDUR_PLAYBOOK_JOBS = {
'PLAYBOOKS_DIR_NAME': 'ansible_playbooks',
'PLAYBOOK_ICON_SIZE': (64, 64),
}
@staticmethod
def django_app():
return 'waldur_ansible.playbook_jobs'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def is_assembly():
return True | |
build_requires_test.py | import unittest
from nose_parameterized.parameterized import parameterized
from conans.test.utils.tools import TestClient
from conans.paths import CONANFILE
tool_conanfile = """
import os
from conans import ConanFile
class Tool(ConanFile):
name = "Tool"
version = "0.1"
def package_info(self):
self.env_info.TOOL_PATH.append("MyToolPath")
"""
tool_conanfile2 = tool_conanfile.replace("0.1", "0.3")
conanfile = """
import os
from conans import ConanFile, tools
class MyLib(ConanFile):
name = "MyLib"
version = "0.1"
{}
def build(self):
self.output.info("ToolPath: %s" % os.getenv("TOOL_PATH"))
"""
requires = conanfile.format('build_requires = "Tool/0.1@lasote/stable"')
requires_range = conanfile.format('build_requires = "Tool/[>0.0]@lasote/stable"')
requirements = conanfile.format("""def build_requirements(self):
self.build_requires("Tool/0.1@lasote/stable")""")
override = conanfile.format("""build_requires = "Tool/0.2@user/channel"
def build_requirements(self):
self.build_requires("Tool/0.1@lasote/stable")""")
profile = """
[build_requires]
Tool/0.3@lasote/stable
nonexistingpattern*: SomeTool/1.2@user/channel
"""
class BuildRequiresTest(unittest.TestCase):
@parameterized.expand([(requires, ), (requires_range, ), (requirements, ), (override, )])
def | (self, conanfile):
client = TestClient()
client.save({CONANFILE: tool_conanfile}, clean_first=True)
client.run("export lasote/stable")
client.save({CONANFILE: conanfile}, clean_first=True)
client.run("export lasote/stable")
client.run("install MyLib/0.1@lasote/stable --build missing")
self.assertIn("Tool/0.1@lasote/stable: Generating the package", client.user_io.out)
self.assertIn("ToolPath: MyToolPath", client.user_io.out)
client.run("install MyLib/0.1@lasote/stable")
self.assertNotIn("Tool", client.user_io.out)
self.assertIn("MyLib/0.1@lasote/stable: Already installed!", client.user_io.out)
@parameterized.expand([(requires, ), (requires_range, ), (requirements, ), (override, )])
def test_profile_override(self, conanfile):
client = TestClient()
client.save({CONANFILE: tool_conanfile2}, clean_first=True)
client.run("export lasote/stable")
client.save({CONANFILE: conanfile,
"profile.txt": profile,
"profile2.txt": profile.replace("0.3", "[>0.2]")}, clean_first=True)
client.run("export lasote/stable")
client.run("install MyLib/0.1@lasote/stable --profile ./profile.txt --build missing")
self.assertNotIn("Tool/0.1", client.user_io.out)
self.assertNotIn("Tool/0.2", client.user_io.out)
self.assertIn("Tool/0.3@lasote/stable: Generating the package", client.user_io.out)
self.assertIn("ToolPath: MyToolPath", client.user_io.out)
client.run("install MyLib/0.1@lasote/stable")
self.assertNotIn("Tool", client.user_io.out)
self.assertIn("MyLib/0.1@lasote/stable: Already installed!", client.user_io.out)
client.run("install MyLib/0.1@lasote/stable --profile ./profile2.txt --build")
self.assertNotIn("Tool/0.1", client.user_io.out)
self.assertNotIn("Tool/0.2", client.user_io.out)
self.assertIn("Tool/0.3@lasote/stable: Generating the package", client.user_io.out)
self.assertIn("ToolPath: MyToolPath", client.user_io.out)
| test_build_requires |
squeezenet.py | import torch
from torch import nn
from torch.nn import functional as F
import torchvision
def main():
|
if __name__ == '__main__':
main()
| print('cuda device count: ', torch.cuda.device_count())
net = torchvision.models.squeezenet1_1(pretrained=True)
#net.fc = nn.Linear(512, 2)
net = net.eval()
net = net.to('cuda:0')
print(net)
tmp = torch.ones(2, 3, 227, 227).to('cuda:0')
out = net(tmp)
print('squeezenet out:', out.shape)
torch.save(net, "squeezenet.pth") |
unassociate_eip_address.go | package vpc
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// UnassociateEipAddress invokes the vpc.UnassociateEipAddress API synchronously
// api document: https://help.aliyun.com/api/vpc/unassociateeipaddress.html
func (client *Client) UnassociateEipAddress(request *UnassociateEipAddressRequest) (response *UnassociateEipAddressResponse, err error) {
response = CreateUnassociateEipAddressResponse()
err = client.DoAction(request, response)
return
}
// UnassociateEipAddressWithChan invokes the vpc.UnassociateEipAddress API asynchronously
// api document: https://help.aliyun.com/api/vpc/unassociateeipaddress.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) UnassociateEipAddressWithChan(request *UnassociateEipAddressRequest) (<-chan *UnassociateEipAddressResponse, <-chan error) {
responseChan := make(chan *UnassociateEipAddressResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.UnassociateEipAddress(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// UnassociateEipAddressWithCallback invokes the vpc.UnassociateEipAddress API asynchronously
// api document: https://help.aliyun.com/api/vpc/unassociateeipaddress.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) UnassociateEipAddressWithCallback(request *UnassociateEipAddressRequest, callback func(response *UnassociateEipAddressResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *UnassociateEipAddressResponse
var err error
defer close(result)
response, err = client.UnassociateEipAddress(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// UnassociateEipAddressRequest is the request struct for api UnassociateEipAddress
type UnassociateEipAddressRequest struct {
*requests.RpcRequest
PrivateIpAddress string `position:"Query" name:"PrivateIpAddress"`
ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"`
InstanceId string `position:"Query" name:"InstanceId"`
ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"`
OwnerAccount string `position:"Query" name:"OwnerAccount"`
InstanceType string `position:"Query" name:"InstanceType"`
Force requests.Boolean `position:"Query" name:"Force"`
AllocationId string `position:"Query" name:"AllocationId"`
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
}
// UnassociateEipAddressResponse is the response struct for api UnassociateEipAddress
type UnassociateEipAddressResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateUnassociateEipAddressRequest creates a request to invoke UnassociateEipAddress API
func CreateUnassociateEipAddressRequest() (request *UnassociateEipAddressRequest) {
request = &UnassociateEipAddressRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Vpc", "2016-04-28", "UnassociateEipAddress", "vpc", "openAPI")
return
}
// CreateUnassociateEipAddressResponse creates a response to parse from UnassociateEipAddress response
func | () (response *UnassociateEipAddressResponse) {
response = &UnassociateEipAddressResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
| CreateUnassociateEipAddressResponse |
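Editorial note: the `WithChan`/`WithCallback` wrappers in this row layer asynchronous delivery over the synchronous `UnassociateEipAddress` call via channels. A rough Python analogue of the channel variant using a `Future`; the `client.unassociate_eip_address` method here is hypothetical.

```python
from concurrent.futures import ThreadPoolExecutor

_executor = ThreadPoolExecutor(max_workers=4)

def unassociate_eip_address_async(client, request):
    """Run the blocking UnassociateEipAddress call off-thread; the Future
    stands in for the (responseChan, errChan) pair: .result() either
    returns the response or re-raises the error."""
    return _executor.submit(client.unassociate_eip_address, request)

# future = unassociate_eip_address_async(client, request)  # hypothetical client
# response = future.result()                               # like reading the chans
```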
main.go | //
// Copyright 2020 Verizon Media
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package main
import (
"flag"
"fmt"
"github.com/AthenZ/athenz/libs/go/sia/util"
"github.com/AthenZ/athenz/provider/azure/sia-vm"
"github.com/AthenZ/athenz/provider/azure/sia-vm/data/attestation"
"github.com/AthenZ/athenz/provider/azure/sia-vm/options"
"io/ioutil"
"log"
"os"
"os/signal"
"strings"
"syscall"
"time"
)
var MetaEndPoint = "http://169.254.169.254"
var ApiVersion = "2020-06-01"
const siaMainDir = "/var/lib/sia"
const siaLinkDir = "/var/run/sia"
const siaVersion = "1.0"
func main() {
cmd := flag.String("cmd", "", "optional sub command to run")
metaEndPoint := flag.String("meta", "", "optional meta endpoint to use for debugging")
ztsEndPoint := flag.String("zts", "", "optional zts endpoint")
ztsServerName := flag.String("ztsservername", "", "zts server name for tls connections")
ztsCACert := flag.String("ztscacert", "", "zts CA certificate file")
ztsAzureDomains := flag.String("ztsazuredomain", "", "ZTS Azure Domain")
ztsResourceUri := flag.String("ztsresourceuri", "", "ZTS AD App Resource URI")
azureProvider := flag.String("azureProvider", "", "Azure Provider Service Name")
countryName := flag.String("countryname", "US", "X.509 Certificate Country Value")
pConf := flag.String("config", "/etc/sia/sia_config", "The config file to run against")
noSysLog := flag.Bool("nosyslog", false, "turn off syslog, log to stdout")
flag.Parse()
if !*noSysLog {
sysLogger, err := util.NewSysLogger()
if err == nil {
log.SetOutput(sysLogger)
} else {
log.SetFlags(log.LstdFlags)
log.Printf("Unable to create sys logger: %v\n", err)
}
} else {
log.SetFlags(log.LstdFlags)
}
if *ztsEndPoint == "" {
log.Fatalf("ztsEndPoint argument must be specified\n")
}
if *ztsAzureDomains == "" {
log.Fatalf("ztsazuredomain argument must be specified\n")
}
ztsAzureDomainList := strings.Split(*ztsAzureDomains, ",")
if *ztsResourceUri == "" {
log.Fatalf("ztsresourceuri argument must be specified\n")
}
if *metaEndPoint != "" {
MetaEndPoint = *metaEndPoint
}
identityDocument, err := attestation.GetIdentityDocument(MetaEndPoint, ApiVersion)
if err != nil {
log.Fatalf("Unable to get the instance identity document, error: %v\n", err)
}
confBytes, _ := ioutil.ReadFile(*pConf)
opts, err := options.NewOptions(confBytes, identityDocument, siaMainDir, siaVersion, *ztsCACert, *ztsServerName, ztsAzureDomainList, *countryName, *azureProvider)
if err != nil {
log.Fatalf("Unable to formulate options, error: %v\n", err)
}
log.Printf("options: %+v\n", opts)
data, err := getAttestationData(*ztsResourceUri, identityDocument, opts)
if err != nil {
log.Fatalf("Unable to formulate attestation data, error: %v\n", err)
}
// for now we're going to rotate once every day
// since our server and role certs are valid for
// 30 days by default
rotationInterval := 24 * 60 * time.Minute
ztsUrl := fmt.Sprintf("https://%s:4443/zts/v1", *ztsEndPoint)
err = util.SetupSIADirs(siaMainDir, siaLinkDir, -1, -1)
if err != nil {
log.Fatalf("Unable to setup sia directories, error: %v\n", err)
}
log.Printf("Request SSH Certificates: %t\n", opts.Ssh)
svcs := options.GetSvcNames(opts.Services)
switch *cmd {
case "rolecert":
sia.GetRoleCertificate(ztsUrl,
fmt.Sprintf("%s/%s.%s.key.pem", opts.KeyDir, opts.Domain, opts.Services[0].Name),
fmt.Sprintf("%s/%s.%s.cert.pem", opts.CertDir, opts.Domain, opts.Services[0].Name),
opts,
)
case "post":
err := sia.RegisterInstance(data, ztsUrl, identityDocument, opts)
if err != nil {
log.Fatalf("Register identity failed, err: %v\n", err)
}
log.Printf("identity registered for services: %s\n", svcs)
case "rotate":
err = sia.RefreshInstance(data, ztsUrl, identityDocument, opts)
if err != nil {
log.Fatalf("Refresh identity failed, err: %v\n", err)
}
log.Printf("Identity successfully refreshed for services: %s\n", svcs)
default:
// if we already have a cert file then we're not going to
// prove our identity since most likely it will not succeed
// due to boot time check (this could be just a regular
// service restart for any reason). Instead, we'll just skip
// over and try to rotate the certs
initialSetup := true
if files, err := ioutil.ReadDir(opts.CertDir); err != nil || len(files) <= 0 {
err := sia.RegisterInstance(data, ztsUrl, identityDocument, opts)
if err != nil {
log.Fatalf("Register identity failed, error: %v\n", err)
}
} else {
initialSetup = false
log.Println("Identity certificate file already exists. Retrieving identity details...")
}
log.Printf("Identity established for services: %s\n", svcs)
stop := make(chan bool, 1)
errors := make(chan error, 1)
go func() {
for {
log.Printf("Identity being used: %s\n", opts.Name)
// if we just did our initial setup there is no point
// to refresh the certs again. so we are going to skip
// this time around and refresh certs next time
if !initialSetup {
data, err := getAttestationData(*ztsResourceUri, identityDocument, opts)
if err != nil {
errors <- fmt.Errorf("Cannot get attestation data: %v\n", err)
return
}
err = sia.RefreshInstance(data, ztsUrl, identityDocument, opts)
if err != nil {
errors <- fmt.Errorf("refresh identity failed, error: %v", err)
return
}
log.Printf("identity successfully refreshed for services: %s\n", svcs)
} else {
initialSetup = false
}
sia.GetRoleCertificate(ztsUrl,
fmt.Sprintf("%s/%s.%s.key.pem", opts.KeyDir, opts.Domain, opts.Services[0].Name),
fmt.Sprintf("%s/%s.%s.cert.pem", opts.CertDir, opts.Domain, opts.Services[0].Name),
opts,
)
select {
case <-stop:
errors <- nil
return
case <-time.After(rotationInterval):
break
}
}
}() | signal.Notify(signals, os.Interrupt, syscall.SIGTERM)
sig := <-signals
log.Printf("Received signal %v, stopping rotation\n", sig)
stop <- true
}()
err = <-errors
if err != nil {
log.Printf("%v\n", err)
}
}
os.Exit(0)
}
// getAttestationData fetches attestation data for all the services mentioned in the config file
func getAttestationData(resourceUri string, identityDocument *attestation.IdentityDocument, opts *options.Options) ([]*attestation.Data, error) {
var data []*attestation.Data
for _, svc := range opts.Services {
a, err := attestation.New(opts.Domain, svc.Name, MetaEndPoint, ApiVersion, resourceUri, identityDocument)
if err != nil {
return nil, err
}
data = append(data, a)
}
return data, nil
} |
go func() {
signals := make(chan os.Signal, 2) |
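Editorial note: the middle restores the goroutine that forwards SIGINT/SIGTERM into the rotation loop's `stop` channel. A compact Python analogue of that shutdown pattern, assuming a loop that polls an event instead of selecting on channels:

```python
import signal
import threading

stop = threading.Event()

def _handle(sig, frame):
    print(f"Received signal {sig}, stopping rotation")
    stop.set()  # plays the role of `stop <- true`

signal.signal(signal.SIGTERM, _handle)
signal.signal(signal.SIGINT, _handle)

# The refresh loop would then call stop.wait(rotation_interval) each cycle
# instead of selecting on <-stop / <-time.After(rotationInterval).
```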
random number generation.py | import random
| x = random.random()
print("The random number is", round(x, 3))
|
setup.py | #!/usr/bin/env python
from os.path import join, dirname, abspath
from setuptools import setup
def read(rel_path):
here = abspath(dirname(__file__))
with open(join(here, rel_path)) as fp:
return fp.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith("__version__"):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
raise RuntimeError("Unable to find version string.")
REQUIREMENTS = read('requirements.txt').splitlines()
DESCRIPTION = read('README.md') | setup(name='robotframework-csvlibrary',
version=get_version("CSVLibrary/__init__.py"),
description='CSV library for Robot Framework',
long_description=DESCRIPTION,
long_description_content_type='text/markdown',
author='Marcin Mierzejewski',
author_email='<[email protected]>',
url='https://github.com/s4int/robotframework-CSVLibrary',
license='Apache License 2.0',
keywords='robotframework testing csv',
platforms='any',
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Testing",
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
],
install_requires=REQUIREMENTS,
packages=['CSVLibrary'],
) | |
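Editorial note: `get_version` in this row deliberately avoids importing the package at build time; it scans the source for a `__version__` line and takes the token between the first pair of quotes. A quick illustration of that parsing rule on a hypothetical line:

```python
# What get_version expects to find near the top of CSVLibrary/__init__.py,
# e.g.:  __version__ = "1.0.0"   (the value here is hypothetical)
line = '__version__ = "1.0.0"'
delim = '"' if '"' in line else "'"
assert line.split(delim)[1] == "1.0.0"  # token between the first pair of quotes
```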
predict.py | from tensorflow.keras.models import load_model
from clean import downsample_mono, envelope
from kapre.time_frequency import STFT, Magnitude, ApplyFilterbank, MagnitudeToDecibel
from sklearn.preprocessing import LabelEncoder
import numpy as np
from glob import glob
import argparse
import os
import pandas as pd
from tqdm import tqdm
def make_prediction(args):
# load the model
|
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Audio Classification Training')
parser.add_argument('--model_fn', type=str, default='models/lstm.h5',
help='model file to make predictions')
parser.add_argument('--pred_fn', type=str, default='y_pred',
help='fn to write predictions in logs dir')
parser.add_argument('--src_dir', type=str, default='wavfiles',
help='directory containing wavfiles to predict')
parser.add_argument('--dt', type=float, default=1.0,
help='time in seconds to sample audio')
parser.add_argument('--sr', type=int, default=16000,
help='sample rate of clean audio')
parser.add_argument('--threshold', type=int, default=20,
help='threshold magnitude for np.int16 dtype')
args, _ = parser.parse_known_args()
make_prediction(args)
| model = load_model(args.model_fn,
custom_objects={'STFT': STFT,
'Magnitude': Magnitude,
'ApplyFilterbank': ApplyFilterbank,
'MagnitudeToDecibel': MagnitudeToDecibel})
# find the sound data
wav_paths = glob('{}/**'.format(args.src_dir), recursive=True)
wav_paths = sorted([x.replace(os.sep, '/') for x in wav_paths if '.wav' in x])
classes = sorted(os.listdir(args.src_dir))
labels = [os.path.split(x)[0].split('/')[-1] for x in wav_paths]
le = LabelEncoder()
y_true = le.fit_transform(labels)
results = []
for z, wav_fn in tqdm(enumerate(wav_paths), total=len(wav_paths)):
rate, wav = downsample_mono(wav_fn, args.sr)
mask, env = envelope(wav, rate, threshold=args.threshold)
clean_wav = wav[mask]
step = int(args.sr * args.dt)
batch = []
for i in range(0, clean_wav.shape[0], step):
sample = clean_wav[i:i + step]
sample = sample.reshape(-1, 1)
if sample.shape[0] < step:
tmp = np.zeros(shape=(step, 1), dtype=np.float32)
tmp[:sample.shape[0], :] = sample.flatten().reshape(-1, 1)
sample = tmp
batch.append(sample)
X_batch = np.array(batch, dtype=np.float32)
y_pred = model.predict(X_batch)
y_mean = np.mean(y_pred, axis=0)
y_pred = np.argmax(y_mean)
real_class = os.path.dirname(wav_fn).split('/')[-1]
print('Actual class: {}, Predicted class: {}'.format(real_class, classes[y_pred]))
results.append(y_mean)
np.save(os.path.join('logs', args.pred_fn), np.array(results)) |
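Editorial note: the middle's prediction loop slices the cleaned signal into windows of `sr * dt` samples and zero-pads the final short window so the batch is rectangular. A standalone sketch of just that windowing step:

```python
import numpy as np

def make_windows(signal, step):
    """Split a 1-D signal into (n, step, 1) windows, zero-padding the tail,
    mirroring the batch-building loop in make_prediction."""
    batch = []
    for i in range(0, signal.shape[0], step):
        sample = signal[i:i + step].reshape(-1, 1)
        if sample.shape[0] < step:
            tmp = np.zeros((step, 1), dtype=np.float32)
            tmp[:sample.shape[0], :] = sample
            sample = tmp
        batch.append(sample)
    return np.array(batch, dtype=np.float32)

windows = make_windows(np.arange(10, dtype=np.float32), step=4)
assert windows.shape == (3, 4, 1)  # last window padded with two zeros
```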
utils.py | """
Copyright (C) 2019 Interactive Brokers LLC. All rights reserved. This code is subject to the terms
and conditions of the IB API Non-Commercial License or the IB API Commercial License, as applicable.
"""
"""
Collection of misc tools
"""
import sys
import logging
import inspect
from ibapi.common import UNSET_INTEGER, UNSET_DOUBLE
logger = logging.getLogger(__name__)
# Used purely to visually emphasize that a method overrides a wrapper method
def iswrapper(fn):
return fn
class BadMessage(Exception):
def __init__(self, text):
self.text = text
class LogFunction(object):
def __init__(self, text, logLevel):
self.text = text
self.logLevel = logLevel
def __call__(self, fn):
def newFn(origSelf, *args, **kwargs):
if logger.isEnabledFor(self.logLevel):
argNames = [argName for argName in inspect.getfullargspec(fn)[0] if argName != 'self']
logger.log(self.logLevel,
"{} {} {} kw:{}".format(self.text, fn.__name__,
[nameNarg for nameNarg in zip(argNames, args) if nameNarg[1] is not origSelf], kwargs))
fn(origSelf, *args, **kwargs)
return newFn
def current_fn_name(parent_idx = 0):
#depth is 1 bc this is already a fn, so we need the caller
return sys._getframe(1 + parent_idx).f_code.co_name
def setattr_log(self, var_name, var_value):
#import code; code.interact(local=locals())
logger.debug("%s %s %s=|%s|", self.__class__, id(self), var_name, var_value)
super(self.__class__, self).__setattr__(var_name, var_value)
SHOW_UNSET = True
def decode(the_type, fields, show_unset = False):
try:
s = next(fields)
except StopIteration:
raise BadMessage("no more fields")
logger.debug("decode %s %s", the_type, s)
if the_type is str:
if type(s) is str:
return s
elif type(s) is bytes:
return s.decode(errors='backslashreplace')
else:
raise TypeError("unsupported incoming type " + type(s) + " for desired type 'str")
orig_type = the_type
if the_type is bool:
the_type = int
if show_unset:
if s is None or len(s) == 0:
if the_type is float:
n = UNSET_DOUBLE
elif the_type is int:
n = UNSET_INTEGER
else:
raise TypeError("unsupported desired type for empty value" + the_type)
else:
n = the_type(s)
else:
|
if orig_type is bool:
n = False if n == 0 else True
return n
def ExerciseStaticMethods(klass):
import types
#import code; code.interact(local=dict(globals(), **locals()))
for (_, var) in inspect.getmembers(klass):
#print(name, var, type(var))
if type(var) == types.FunctionType:
print("Exercising: %s:" % var)
print(var())
print()
def floatToStr(val):
return str(val) if val != UNSET_DOUBLE else ""
| n = the_type(s or 0) |
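Editorial note: `decode` pulls one token off the `fields` iterator and coerces it to the requested type; with `show_unset` off, empty tokens collapse to `0`/`False` via the row's middle, `n = the_type(s or 0)`. A minimal usage sketch, assuming the `ibapi` package is installed:

```python
from ibapi.utils import decode

fields = iter([b"42", b"", b"1"])
assert decode(int, fields) == 42     # bytes coerced via int(s or 0)
assert decode(int, fields) == 0      # empty field falls back to 0
assert decode(bool, fields) is True  # bool goes through int, then 0/1 -> False/True
```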
lib.rs | #![recursion_limit="128"]
#![doc(html_root_url = "https://api.rocket.rs/master")]
#![doc(html_favicon_url = "https://rocket.rs/images/favicon.ico")]
#![doc(html_logo_url = "https://rocket.rs/images/logo-boxed.png")]
#![warn(rust_2018_idioms)]
//! # Rocket - Code Generation
//!
//! This crate implements the code generation portions of Rocket. This includes
//! custom derives, custom attributes, and procedural macros. The documentation
//! here is purely technical. The code generation facilities are documented
//! thoroughly in the [Rocket programming guide](https://rocket.rs/master/guide).
//!
//! # Usage
//!
//! You **_should not_** directly depend on this library. To use the macros,
//! attributes, and derives in this crate, it suffices to depend on `rocket` in
//! `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! rocket = "0.5.0-dev"
//! ```
//!
//! And to import all macros, attributes, and derives via `#[macro_use]` in the
//! crate root:
//!
//! ```rust
//! #[macro_use] extern crate rocket;
//! # #[get("/")] fn hello() { }
//! # fn main() { rocket::ignite().mount("/", routes![hello]); }
//! ```
//!
//! Or, alternatively, selectively import from the top-level scope:
//!
//! ```rust
//! # extern crate rocket;
//!
//! use rocket::{get, routes};
//! # #[get("/")] fn hello() { }
//! # fn main() { rocket::ignite().mount("/", routes![hello]); }
//! ```
//!
//! # Debugging Codegen
//!
//! When the `ROCKET_CODEGEN_DEBUG` environment variable is set, this crate
//! logs, at compile-time and to the console, the items it generates. For
//! example, you might run the following to build a Rocket application with
//! codegen debug logging enabled:
//!
//! ```sh
//! ROCKET_CODEGEN_DEBUG=1 cargo build
//! ```
#[macro_use] extern crate quote;
use rocket_http as http;
macro_rules! vars_and_mods {
($($name:ident => $path:path,)*) => {
macro_rules! define {
// Note: the `o` is to capture the input's span
$(($i:ident $name) => {
#[allow(non_snake_case)] let $i = quote!($path);
};)*
$(($span:expr => $i:ident $name) => {
#[allow(non_snake_case)] let $i = quote_spanned!($span => $path);
};)*
}
}
}
vars_and_mods! {
req => __req,
status => __status,
catcher => __catcher,
data => __data,
error => __error,
trail => __trail,
request => rocket::request,
response => rocket::response,
handler => rocket::handler,
log => rocket::logger,
Outcome => rocket::outcome::Outcome,
FromTransformedData => rocket::data::FromTransformedData,
Transform => rocket::data::Transform,
Query => rocket::request::Query,
FromFormValue => rocket::request::FromFormValue,
Request => rocket::request::Request,
Response => rocket::response::Response,
Data => rocket::data::Data,
StaticRouteInfo => rocket::StaticRouteInfo,
StaticCatcherInfo => rocket::StaticCatcherInfo,
Route => rocket::Route,
Catcher => rocket::Catcher,
SmallVec => rocket::http::private::SmallVec,
Status => rocket::http::Status,
HandlerFuture => rocket::handler::HandlerFuture,
ErrorHandlerFuture => rocket::catcher::ErrorHandlerFuture,
_Option => ::std::option::Option,
_Result => ::std::result::Result,
_Some => ::std::option::Option::Some,
_None => ::std::option::Option::None,
_Ok => ::std::result::Result::Ok,
_Err => ::std::result::Result::Err,
_Box => ::std::boxed::Box,
_Vec => ::std::vec::Vec,
}
macro_rules! define_vars_and_mods {
($($name:ident),*) => ($(define!($name $name);)*);
($span:expr => $($name:ident),*) => ($(define!($span => $name $name);)*)
}
#[macro_use]
mod proc_macro_ext;
mod derive;
mod attribute;
mod bang;
mod http_codegen;
mod syn_ext;
use crate::http::Method;
use proc_macro::TokenStream;
use devise::{proc_macro2, syn};
static URI_MACRO_PREFIX: &str = "rocket_uri_macro_";
static ROCKET_PARAM_PREFIX: &str = "__rocket_param_";
macro_rules! emit {
($tokens:expr) => ({
use devise::ext::SpanDiagnosticExt;
let mut tokens = $tokens;
if std::env::var_os("ROCKET_CODEGEN_DEBUG").is_some() {
let debug_tokens = proc_macro2::Span::call_site()
.note("emitting Rocket code generation debug output")
.note(tokens.to_string())
.emit_as_item_tokens();
tokens.extend(debug_tokens);
}
tokens.into()
})
}
macro_rules! route_attribute {
($name:ident => $method:expr) => (
/// Attribute to generate a [`Route`] and associated metadata.
///
/// This and all other route attributes can only be applied to free
/// functions:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[get("/")]
/// fn index() -> &'static str {
/// "Hello, world!"
/// }
/// ```
///
/// There are 7 method-specific route attributes:
///
/// * [`get`] - `GET` specific route
/// * [`put`] - `PUT` specific route
/// * [`post`] - `POST` specific route
/// * [`delete`] - `DELETE` specific route
/// * [`head`] - `HEAD` specific route
/// * [`options`] - `OPTIONS` specific route
/// * [`patch`] - `PATCH` specific route
///
/// Additionally, [`route`] allows the method and path to be explicitly
/// specified:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[route(GET, path = "/")]
/// fn index() -> &'static str {
/// "Hello, world!"
/// }
/// ```
///
/// [`get`]: attr.get.html
/// [`put`]: attr.put.html
/// [`post`]: attr.post.html
/// [`delete`]: attr.delete.html
/// [`head`]: attr.head.html
/// [`options`]: attr.options.html
/// [`patch`]: attr.patch.html
/// [`route`]: attr.route.html
///
/// # Grammar
///
/// The grammar for all method-specific route attributes is defined as:
///
/// ```text
/// route := '"' path ('?' query)? '"' (',' parameter)*
///
/// path := ('/' segment)*
///
/// query := segment ('&' segment)*
///
/// segment := URI_SEG
/// | SINGLE_PARAM
/// | MULTI_PARAM
///
/// parameter := 'rank' '=' INTEGER
/// | 'format' '=' '"' MEDIA_TYPE '"'
/// | 'data' '=' '"' SINGLE_PARAM '"'
///
/// SINGLE_PARAM := '<' IDENT '>'
/// MULTI_PARAM := '<' IDENT '..>'
///
/// URI_SEG := valid, non-percent-encoded HTTP URI segment
/// MEDIA_TYPE := valid HTTP media type or known shorthand
///
/// INTEGER := unsigned integer, as defined by Rust
/// IDENT := valid identifier, as defined by Rust, except `_`
/// ```
///
/// The generic route attribute is defined as:
///
/// ```text
/// generic-route := METHOD ',' 'path' '=' route
/// ```
///
/// # Typing Requirements
///
/// Every identifier that appears in a dynamic parameter (`SINGLE_PARAM`
/// or `MULTI_PARAM`) must appear as an argument to the function. For
/// example, the following route requires the decorated function to have
/// the arguments `foo`, `baz`, `msg`, `rest`, and `form`:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # use rocket::request::Form;
/// # use std::path::PathBuf;
/// # #[derive(FromForm)] struct F { a: usize }
/// #[get("/<foo>/bar/<baz..>?<msg>&closed&<rest..>", data = "<form>")]
/// # fn f(foo: usize, baz: PathBuf, msg: String, rest: Form<F>, form: Form<F>) { }
/// ```
///
/// The type of each function argument corresponding to a dynamic
/// parameter is required to implement one of Rocket's guard traits. The
/// exact trait that is required to be implemented depends on the kind
/// of dynamic parameter (`SINGLE` or `MULTI`) and where in the route
/// attribute the parameter appears. The table below summarizes trait
/// requirements:
///
/// | position | kind | trait |
/// |----------|-------------|-------------------|
/// | path | `<ident>` | [`FromParam`] |
/// | path | `<ident..>` | [`FromSegments`] |
/// | query | `<ident>` | [`FromFormValue`] |
/// | query | `<ident..>` | [`FromQuery`] |
/// | data | `<ident>` | [`FromTransformedData`] |
///
/// The type of each function argument that _does not_ have a
/// corresponding dynamic parameter is required to implement the
/// [`FromRequest`] trait.
///
/// The return type of the decorated function must implement the
/// [`Responder`] trait.
///
/// [`FromParam`]: ../rocket/request/trait.FromParam.html
/// [`FromSegments`]: ../rocket/request/trait.FromSegments.html
/// [`FromFormValue`]: ../rocket/request/trait.FromFormValue.html
/// [`FromQuery`]: ../rocket/request/trait.FromQuery.html
/// [`FromTransformedData`]: ../rocket/data/trait.FromTransformedData.html
/// [`FromRequest`]: ../rocket/request/trait.FromRequest.html
/// [`Route`]: ../rocket/struct.Route.html
/// [`Responder`]: ../rocket/response/trait.Responder.html
///
/// # Semantics
///
/// The attribute generates three items:
///
/// 1. A route [`Handler`].
///
/// The generated handler validates and generates all arguments for
/// the generated function according to the trait that their type
/// must implement. The order in which arguments are processed is:
///
/// 1. Request guards from left to right.
///
/// If a request guard fails, the request is forwarded if the
/// [`Outcome`] is `Forward` or failed if the [`Outcome`] is
/// `Failure`. See [`FromRequest` Outcomes] for further
/// detail.
///
/// 2. Path and query parameters from left to right as declared
/// in the function argument list. | ///
/// 3. Data parameter, if any.
///
/// If a data guard fails, the request is forwarded if the
/// [`Outcome`] is `Forward` or failed if the [`Outcome`] is
/// `Failure`. See [`FromTransformedData` Outcomes] for further detail.
///
/// If all validation succeeds, the decorated function is called.
/// The returned value is used to generate a [`Response`] via the
/// type's [`Responder`] implementation.
///
/// 2. A static structure used by [`routes!`] to generate a [`Route`].
///
/// The static structure (and resulting [`Route`]) is populated
/// with the name (the function's name), path, query, rank, and
/// format from the route attribute. The handler is set to the
/// generated handler.
///
/// 3. A macro used by [`uri!`] to type-check and generate an
/// [`Origin`].
///
/// [`Handler`]: ../rocket/trait.Handler.html
/// [`routes!`]: macro.routes.html
/// [`uri!`]: macro.uri.html
/// [`Origin`]: ../rocket/http/uri/struct.Origin.html
/// [`Outcome`]: ../rocket/outcome/enum.Outcome.html
/// [`Response`]: ../rocket/struct.Response.html
/// [`FromRequest` Outcomes]: ../rocket/request/trait.FromRequest.html#outcomes
/// [`FromTransformedData` Outcomes]: ../rocket/data/trait.FromTransformedData.html#outcomes
#[proc_macro_attribute]
pub fn $name(args: TokenStream, input: TokenStream) -> TokenStream {
emit!(attribute::route::route_attribute($method, args, input))
}
)
}
route_attribute!(route => None);
route_attribute!(get => Method::Get);
route_attribute!(put => Method::Put);
route_attribute!(post => Method::Post);
route_attribute!(delete => Method::Delete);
route_attribute!(head => Method::Head);
route_attribute!(patch => Method::Patch);
route_attribute!(options => Method::Options);
/// Attribute to generate a [`Catcher`] and associated metadata.
///
/// This attribute can only be applied to free functions:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// use rocket::Request;
/// use rocket::http::Status;
///
/// #[catch(404)]
/// fn not_found(req: &Request) -> String {
/// format!("Sorry, {} does not exist.", req.uri())
/// }
///
/// #[catch(default)]
/// fn default(status: Status, req: &Request) -> String {
/// format!("{} - {} ({})", status.code, status.reason, req.uri())
/// }
/// ```
///
/// # Grammar
///
/// The grammar for the `#[catch]` attributes is defined as:
///
/// ```text
/// catch := STATUS | 'default'
///
/// STATUS := valid HTTP status code (integer in [200, 599])
/// ```
///
/// # Typing Requirements
///
/// The decorated function may take zero, one, or two arguments. Its type
/// signature must be one of the following, where `R:`[`Responder`]:
///
/// * `fn() -> R`
/// * `fn(`[`&Request`]`) -> R`
/// * `fn(`[`Status`]`, `[`&Request`]`) -> R`
///
/// # Semantics
///
/// The attribute generates two items:
///
/// 1. An [`ErrorHandler`].
///
/// The generated handler calls the decorated function, passing in the
/// [`Status`] and [`&Request`] values if requested. The returned value is
/// used to generate a [`Response`] via the type's [`Responder`]
/// implementation.
///
/// 2. A static structure used by [`catchers!`] to generate a [`Catcher`].
///
/// The static structure (and resulting [`Catcher`]) is populated with the
/// name (the function's name) and status code from the route attribute or
/// `None` if `default`. The handler is set to the generated handler.
///
/// [`&Request`]: ../rocket/struct.Request.html
/// [`Status`]: ../rocket/http/struct.Status.html
/// [`ErrorHandler`]: ../rocket/type.ErrorHandler.html
/// [`catchers!`]: macro.catchers.html
/// [`Catcher`]: ../rocket/struct.Catcher.html
/// [`Response`]: ../rocket/struct.Response.html
/// [`Responder`]: ../rocket/response/trait.Responder.html
#[proc_macro_attribute]
pub fn catch(args: TokenStream, input: TokenStream) -> TokenStream {
emit!(attribute::catch::catch_attribute(args, input))
}
/// FIXME: Document.
#[proc_macro_attribute]
pub fn async_test(args: TokenStream, input: TokenStream) -> TokenStream {
emit!(attribute::async_entry::async_test_attribute(args, input))
}
/// FIXME: Document.
#[proc_macro_attribute]
pub fn main(args: TokenStream, input: TokenStream) -> TokenStream {
emit!(attribute::async_entry::main_attribute(args, input))
}
/// FIXME: Document.
#[proc_macro_attribute]
pub fn launch(args: TokenStream, input: TokenStream) -> TokenStream {
emit!(attribute::async_entry::launch_attribute(args, input))
}
/// Derive for the [`FromFormValue`] trait.
///
/// The [`FromFormValue`] derive can be applied to enums with nullary
/// (zero-length) fields:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[derive(FromFormValue)]
/// enum MyValue {
/// First,
/// Second,
/// Third,
/// }
/// ```
///
/// The derive generates an implementation of the [`FromFormValue`] trait for
/// the decorated `enum`. The implementation returns successfully when the form
/// value matches, case insensitively, the stringified version of a variant's
/// name, returning an instance of said variant. If there is no match, an error
/// ([`FromFormValue::Error`]) of type [`&RawStr`] is returned, the value of
/// which is the raw form field value that failed to match.
///
/// As an example, for the `enum` above, the form values `"first"`, `"FIRST"`,
/// `"fiRSt"`, and so on would parse as `MyValue::First`, while `"second"` and
/// `"third"` would parse as `MyValue::Second` and `MyValue::Third`,
/// respectively.
///
/// The `form` field attribute can be used to change the string that is compared
/// against for a given variant:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[derive(FromFormValue)]
/// enum MyValue {
/// First,
/// Second,
/// #[form(value = "fourth")]
/// Third,
/// }
/// ```
///
/// The `#[form]` attribute's grammar is:
///
/// ```text
/// form := 'field' '=' STRING_LIT
///
/// STRING_LIT := any valid string literal, as defined by Rust
/// ```
///
/// The attribute accepts a single string parameter of name `value`
/// corresponding to the string to use to match against for the decorated
/// variant. In the example above, the strings `"fourth"`, `"FOUrth"`, and so
/// on would parse as `MyValue::Third`.
///
/// [`FromFormValue`]: ../rocket/request/trait.FromFormValue.html
/// [`FromFormValue::Error`]: ../rocket/request/trait.FromFormValue.html#associatedtype.Error
/// [`&RawStr`]: ../rocket/http/struct.RawStr.html
// FIXME(rustdoc): We should be able to refer to items in `rocket`.
#[proc_macro_derive(FromFormValue, attributes(form))]
pub fn derive_from_form_value(input: TokenStream) -> TokenStream {
emit!(derive::from_form_value::derive_from_form_value(input))
}
/// Derive for the [`FromForm`] trait.
///
/// The [`FromForm`] derive can be applied to structures with named fields:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[derive(FromForm)]
/// struct MyStruct {
/// field: usize,
/// other: String
/// }
/// ```
///
/// Each field's type is required to implement [`FromFormValue`].
///
/// The derive generates an implementation of the [`FromForm`] trait. The
/// implementation parses a form whose field names match the field names of the
/// structure on which the derive was applied. Each field's value is parsed with
/// the [`FromFormValue`] implementation of the field's type. The `FromForm`
/// implementation succeeds only when all of the field parses succeed. If
/// parsing fails, an error ([`FromForm::Error`]) of type [`FormParseError`] is
/// returned.
///
/// The derive accepts one field attribute: `form`, with the following syntax:
///
/// ```text
/// form := 'field' '=' '"' IDENT '"'
///
/// IDENT := valid identifier, as defined by Rust
/// ```
///
/// When applied, the attribute looks as follows:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[derive(FromForm)]
/// struct MyStruct {
/// field: usize,
/// #[form(field = "renamed_field")]
/// other: String
/// }
/// ```
///
/// The field attribute directs that a different incoming field name is
/// expected, and the value of the `field` attribute is used instead of the
/// structure's actual field name when parsing a form. In the example above, the
/// value of the `MyStruct::other` struct field will be parsed from the incoming
/// form's `renamed_field` field.
///
/// [`FromForm`]: ../rocket/request/trait.FromForm.html
/// [`FromFormValue`]: ../rocket/request/trait.FromFormValue.html
/// [`FormParseError`]: ../rocket/request/enum.FormParseError.html
/// [`FromForm::Error`]: ../rocket/request/trait.FromForm.html#associatedtype.Error
#[proc_macro_derive(FromForm, attributes(form))]
pub fn derive_from_form(input: TokenStream) -> TokenStream {
emit!(derive::from_form::derive_from_form(input))
}
/// Derive for the [`Responder`] trait.
///
/// The [`Responder`] derive can be applied to enums and structs with named
/// fields. When applied to enums, variants must have at least one field. When
/// applied to structs, the struct must have at least one field.
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # use std::fs::File;
/// # use rocket::http::ContentType;
/// # type OtherResponder = MyResponderA;
/// #
/// #[derive(Responder)]
/// enum MyResponderA {
/// A(String),
/// B(File, ContentType),
/// }
///
/// #[derive(Responder)]
/// struct MyResponderB {
/// inner: OtherResponder,
/// header: ContentType,
/// }
/// ```
///
/// The derive generates an implementation of the [`Responder`] trait for the
/// decorated enum or structure. The derive uses the _first_ field of a variant
/// or structure to generate a [`Response`]. As such, the type of the first
/// field must implement [`Responder`]. The remaining fields of a variant or
/// structure are set as headers in the produced [`Response`] using
/// [`Response::set_header()`]. As such, every other field (unless explicitly
/// ignored, explained next) must implement `Into<Header>`.
///
/// Except for the first field, fields decorated with `#[response(ignore)]` are
/// ignored by the derive:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # use std::fs::File;
/// # use rocket::http::ContentType;
/// # use rocket::response::NamedFile;
/// # type Other = usize;
/// #
/// #[derive(Responder)]
/// enum MyResponder {
/// A(String),
/// B(File, ContentType, #[response(ignore)] Other),
/// }
///
/// #[derive(Responder)]
/// struct MyOtherResponder {
/// inner: NamedFile,
/// header: ContentType,
/// #[response(ignore)]
/// other: Other,
/// }
/// ```
///
/// Decorating the first field with `#[response(ignore)]` has no effect.
///
/// Additionally, the `response` attribute can be used on named structures and
/// enum variants to override the status and/or content-type of the [`Response`]
/// produced by the generated implementation. The `response` attribute used in
/// these positions has the following grammar:
///
/// ```text
/// response := parameter (',' parameter)?
///
/// parameter := 'status' '=' STATUS
/// | 'content_type' '=' CONTENT_TYPE
///
/// STATUS := unsigned integer >= 100 and < 600
/// CONTENT_TYPE := string literal, as defined by Rust, identifying a valid
/// Content-Type, as defined by Rocket
/// ```
///
/// It can be used as follows:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # use rocket::http::ContentType;
/// # use rocket::response::NamedFile;
/// # type Other = usize;
/// # type InnerResponder = String;
/// #
/// #[derive(Responder)]
/// enum Error {
/// #[response(status = 500, content_type = "json")]
/// A(String),
/// #[response(status = 404)]
/// B(NamedFile, ContentType),
/// }
///
/// #[derive(Responder)]
/// #[response(status = 400)]
/// struct MyResponder {
/// inner: InnerResponder,
/// header: ContentType,
/// #[response(ignore)]
/// other: Other,
/// }
/// ```
///
/// The attribute accepts two key/value pairs: `status` and `content_type`. The
/// value of `status` must be an unsigned integer representing a valid status
/// code. The [`Response`] produced from the generated implementation will have
/// its status overridden to this value.
///
/// The value of `content_type` must be a valid media-type in `top/sub` form or
/// `shorthand` form. Examples include:
///
/// * `"text/html"`
/// * `"application/x-custom"`
/// * `"html"`
/// * `"json"`
/// * `"plain"`
/// * `"binary"`
///
/// See [`ContentType::parse_flexible()`] for a full list of available
/// shorthands. The [`Response`] produced from the generated implementation will
/// have its content-type overridden to this value.
///
/// [`Responder`]: ../rocket/response/trait.Responder.html
/// [`Response`]: ../rocket/struct.Response.html
/// [`Response::set_header()`]: ../rocket/response/struct.Response.html#method.set_header
/// [`ContentType::parse_flexible()`]: ../rocket/http/struct.ContentType.html#method.parse_flexible
#[proc_macro_derive(Responder, attributes(response))]
pub fn derive_responder(input: TokenStream) -> TokenStream {
emit!(derive::responder::derive_responder(input))
}
/// Derive for the [`UriDisplay<Query>`] trait.
///
/// The [`UriDisplay<Query>`] derive can be applied to enums and structs. When
/// applied to enums, variants must have at least one field. When applied to
/// structs, the struct must have at least one field.
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #[derive(UriDisplayQuery)]
/// enum Kind {
/// A(String),
/// B(usize),
/// }
///
/// #[derive(UriDisplayQuery)]
/// struct MyStruct {
/// name: String,
/// id: usize,
/// kind: Kind,
/// }
/// ```
///
/// Each field's type is required to implement [`UriDisplay<Query>`].
///
/// The derive generates an implementation of the [`UriDisplay<Query>`] trait.
/// The implementation calls [`Formatter::write_named_value()`] for every named
/// field, using the field's name (unless overridden, explained next) as the
/// `name` parameter, and [`Formatter::write_value()`] for every unnamed field
/// in the order the fields are declared.
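///
/// As a sketch of the output's shape (the exact encoding comes from each
/// field's `UriDisplay` implementation), a value like
/// `MyStruct { name: "hi".into(), id: 10, kind: Kind::A("b".into()) }`
/// would render as `name=hi&id=10&kind=b`.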
///
/// The derive accepts one field attribute: `form`, with the following syntax:
///
/// ```text
/// form := 'field' '=' '"' IDENT '"'
///
/// IDENT := valid identifier, as defined by Rust
/// ```
///
/// When applied, the attribute looks as follows:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # #[derive(UriDisplayQuery)]
/// # struct Kind(String);
/// #[derive(UriDisplayQuery)]
/// struct MyStruct {
/// name: String,
/// id: usize,
/// #[form(field = "type")]
/// kind: Kind,
/// }
/// ```
///
/// The field attribute directs that a different field name be used when calling
/// [`Formatter::write_named_value()`] for the given field. The value of the
/// `field` attribute is used instead of the structure's actual field name. In
/// the example above, the field `MyStruct::kind` is rendered with a name of
/// `type`.
///
/// [`UriDisplay<Query>`]: ../rocket/http/uri/trait.UriDisplay.html
/// [`Formatter::write_named_value()`]: ../rocket/http/uri/struct.Formatter.html#method.write_named_value
/// [`Formatter::write_value()`]: ../rocket/http/uri/struct.Formatter.html#method.write_value
#[proc_macro_derive(UriDisplayQuery, attributes(form))]
pub fn derive_uri_display_query(input: TokenStream) -> TokenStream {
emit!(derive::uri_display::derive_uri_display_query(input))
}
/// Derive for the [`UriDisplay<Path>`] trait.
///
/// The [`UriDisplay<Path>`] derive can only be applied to tuple structs with
/// one field.
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #[derive(UriDisplayPath)]
/// struct Name(String);
///
/// #[derive(UriDisplayPath)]
/// struct Age(usize);
/// ```
///
/// The field's type is required to implement [`UriDisplay<Path>`].
///
/// The derive generates an implementation of the [`UriDisplay<Path>`] trait.
/// The implementation calls [`Formatter::write_value()`] for the field.
///
/// [`UriDisplay<Path>`]: ../rocket/http/uri/trait.UriDisplay.html
/// [`Formatter::write_value()`]: ../rocket/http/uri/struct.Formatter.html#method.write_value
#[proc_macro_derive(UriDisplayPath)]
pub fn derive_uri_display_path(input: TokenStream) -> TokenStream {
emit!(derive::uri_display::derive_uri_display_path(input))
}
/// Generates a [`Vec`] of [`Route`]s from a set of route paths.
///
/// The `routes!` macro expands a list of route paths into a [`Vec`] of their
/// corresponding [`Route`] structures. For example, given the following routes:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[get("/")]
/// fn index() { /* .. */ }
///
/// mod person {
/// #[post("/hi/<person>")]
/// pub fn hello(person: String) { /* .. */ }
/// }
/// ```
///
/// The `routes!` macro can be used as:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// # use rocket::http::Method;
/// #
/// # #[get("/")] fn index() { /* .. */ }
/// # mod person {
/// # #[post("/hi/<person>")] pub fn hello(person: String) { /* .. */ }
/// # }
/// let my_routes = routes![index, person::hello];
/// assert_eq!(my_routes.len(), 2);
///
/// let index_route = &my_routes[0];
/// assert_eq!(index_route.method, Method::Get);
/// assert_eq!(index_route.name, Some("index"));
/// assert_eq!(index_route.uri.path(), "/");
///
/// let hello_route = &my_routes[1];
/// assert_eq!(hello_route.method, Method::Post);
/// assert_eq!(hello_route.name, Some("hello"));
/// assert_eq!(hello_route.uri.path(), "/hi/<person>");
/// ```
///
/// The grammar for `routes!` is defined as:
///
/// ```text
/// routes := PATH (',' PATH)*
///
/// PATH := a path, as defined by Rust
/// ```
///
/// [`Route`]: ../rocket/struct.Route.html
#[proc_macro]
pub fn routes(input: TokenStream) -> TokenStream {
emit!(bang::routes_macro(input))
}
/// Generates a [`Vec`] of [`Catcher`]s from a set of catcher paths.
///
/// The `catchers!` macro expands a list of catcher paths into a [`Vec`] of
/// their corresponding [`Catcher`] structures. For example, given the following
/// catchers:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[catch(404)]
/// fn not_found() { /* .. */ }
///
/// mod inner {
/// #[catch(400)]
/// pub fn unauthorized() { /* .. */ }
/// }
///
/// #[catch(default)]
/// fn default_catcher() { /* .. */ }
/// ```
///
/// The `catchers!` macro can be used as:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// # #[catch(404)] fn not_found() { /* .. */ }
/// # #[catch(default)] fn default_catcher() { /* .. */ }
/// # mod inner {
/// # #[catch(400)] pub fn unauthorized() { /* .. */ }
/// # }
/// let my_catchers = catchers![not_found, inner::unauthorized, default_catcher];
/// assert_eq!(my_catchers.len(), 3);
///
/// let not_found = &my_catchers[0];
/// assert_eq!(not_found.code, Some(404));
///
/// let unauthorized = &my_catchers[1];
/// assert_eq!(unauthorized.code, Some(400));
///
/// let default = &my_catchers[2];
/// assert_eq!(default.code, None);
/// ```
///
/// The grammar for `catchers!` is defined as:
///
/// ```text
/// catchers := PATH (',' PATH)*
///
/// PATH := a path, as defined by Rust
/// ```
///
/// [`Catcher`]: ../rocket/struct.Catcher.html
#[proc_macro]
pub fn catchers(input: TokenStream) -> TokenStream {
emit!(bang::catchers_macro(input))
}
/// Type-safe, URI-safe generation of an [`Origin`] URI from a route.
///
/// The `uri!` macro creates a type-safe, URL-safe URI given a route and values
/// for the route's URI parameters. The inputs to the macro are the path to a
/// route, a colon, and one argument for each dynamic parameter (parameters in
/// `<>`) in the route's path and query.
///
/// For example, for the following route:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// #[get("/person/<name>?<age>")]
/// fn person(name: String, age: Option<u8>) -> String {
/// # "".into() /*
/// ...
/// # */
/// }
/// ```
///
/// A URI can be created as follows:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// #
/// # #[get("/person/<name>?<age>")]
/// # fn person(name: String, age: Option<u8>) { }
/// #
/// // with unnamed parameters, in route path declaration order
/// let mike = uri!(person: "Mike Smith", Some(28));
/// assert_eq!(mike.to_string(), "/person/Mike%20Smith?age=28");
///
/// // with named parameters, order irrelevant
/// let mike = uri!(person: name = "Mike", age = Some(28));
/// let mike = uri!(person: age = Some(28), name = "Mike");
/// assert_eq!(mike.to_string(), "/person/Mike?age=28");
///
/// // with a specific mount-point
/// let mike = uri!("/api", person: name = "Mike", age = Some(28));
/// assert_eq!(mike.to_string(), "/api/person/Mike?age=28");
///
/// // with unnamed values ignored
/// let mike = uri!(person: "Mike", _);
/// assert_eq!(mike.to_string(), "/person/Mike");
///
/// // with unnamed values, explicitly `None`.
/// let option: Option<u8> = None;
/// let mike = uri!(person: "Mike", option);
/// assert_eq!(mike.to_string(), "/person/Mike");
///
/// // with named values ignored
/// let mike = uri!(person: name = "Mike", age = _);
/// assert_eq!(mike.to_string(), "/person/Mike");
///
/// // with named values, explicitly `None`
/// let option: Option<u8> = None;
/// let mike = uri!(person: name = "Mike", age = option);
/// assert_eq!(mike.to_string(), "/person/Mike");
/// ```
///
/// ## Grammar
///
/// The grammar for the `uri!` macro is:
///
/// ```text
/// uri := (mount ',')? PATH (':' params)?
///
/// mount := STRING
/// params := unnamed | named
/// unnamed := expr (',' expr)*
/// named := IDENT = expr (',' named)?
/// expr := EXPR | '_'
///
/// EXPR := a valid Rust expression (examples: `foo()`, `12`, `"hey"`)
/// IDENT := a valid Rust identifier (examples: `name`, `age`)
/// STRING := an uncooked string literal, as defined by Rust (example: `"hi"`)
/// PATH := a path, as defined by Rust (examples: `route`, `my_mod::route`)
/// ```
///
/// ## Semantics
///
/// The `uri!` macro returns an [`Origin`] structure with the URI of the
/// supplied route interpolated with the given values. Note that `Origin`
/// implements `Into<Uri>` (and by extension, `TryInto<Uri>`), so it can be
/// converted into a [`Uri`] using `.into()` as needed.
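///
/// A small sketch of that conversion, reusing the `person` route from
/// above:
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// # use rocket::http::uri::Uri;
/// # #[get("/person/<name>?<age>")]
/// # fn person(name: String, age: Option<u8>) { }
/// let origin = uri!(person: name = "Mike", age = Some(28));
/// let uri: Uri = origin.into();
/// assert_eq!(uri.to_string(), "/person/Mike?age=28");
/// ```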
///
/// A `uri!` invocation only typechecks if the type of every value in the
/// invocation matches the type declared for the parameter in the given route,
/// after conversion with [`FromUriParam`], or if a value is ignored using `_`
/// and the corresponding route type implements [`Ignorable`].
///
/// Each value passed into `uri!` is rendered in its appropriate place in the
/// URI using the [`UriDisplay`] implementation for the value's type. The
/// `UriDisplay` implementation ensures that the rendered value is URI-safe.
///
/// If a mount-point is provided, the mount-point is prepended to the route's
/// URI.
///
/// ### Conversion
///
/// The [`FromUriParam`] trait is used to typecheck and perform a conversion for
/// each value passed to `uri!`. If a `FromUriParam<P, S>` implementation exists
/// for a type `T` for URI part `P`, then a value of type `S` can be used in
/// the `uri!` macro for a route URI parameter declared with a type of `T` in
/// part `P`. For example, the following implementation, provided by Rocket,
/// allows an `&str` to be used in a `uri!` invocation for route URI parameters
/// declared as `String`:
///
/// ```rust,ignore
/// impl<'a, P: UriPart> FromUriParam<P, &'a str> for String { .. }
/// ```
///
/// ### Ignorables
///
/// Query parameters can be ignored using `_` in place of an expression. The
/// corresponding type in the route URI must implement [`Ignorable`]. Ignored
/// parameters are not interpolated into the resulting `Origin`. Path parameters
/// are not ignorable.
///
/// [`Uri`]: ../rocket/http/uri/enum.Uri.html
/// [`Origin`]: ../rocket/http/uri/struct.Origin.html
/// [`FromUriParam`]: ../rocket/http/uri/trait.FromUriParam.html
/// [`UriDisplay`]: ../rocket/http/uri/trait.UriDisplay.html
/// [`Ignorable`]: ../rocket/http/uri/trait.Ignorable.html
#[proc_macro]
pub fn uri(input: TokenStream) -> TokenStream {
emit!(bang::uri_macro(input))
}
#[doc(hidden)]
#[proc_macro]
pub fn rocket_internal_uri(input: TokenStream) -> TokenStream {
emit!(bang::uri_internal_macro(input))
}
#[doc(hidden)]
#[proc_macro]
pub fn rocket_internal_guide_tests(input: TokenStream) -> TokenStream {
emit!(bang::guide_tests_internal(input))
}
mixin.py | class TransactionHooksDatabaseWrapperMixin(object):
"""
A ``DatabaseWrapper`` mixin to implement transaction-committed hooks.
To use, create a package for your custom database backend and place a
``base.py`` module within it. Import whatever ``DatabaseWrapper`` you want
to subclass (under some other name), and then create a ``DatabaseWrapper``
class which inherits from both this mixin and the parent
``DatabaseWrapper`` (in that order).
For an example, see ``backends/postgresql_psycopg2/base.py``.
"""
    def __init__(self, *a, **kw):
# a list of no-argument functions to run when the transaction commits;
# each entry is an (sids, func) tuple, where sids is a list of the
# active savepoint IDs when this function was registered
self.run_on_commit = []
# Should we run the on-commit hooks the next time set_autocommit(True)
# is called?
self.run_commit_hooks_on_set_autocommit_on = False
super(TransactionHooksDatabaseWrapperMixin, self).__init__(*a, **kw)
def on_commit(self, func):
if self.in_atomic_block:
# transaction in progress; save for execution on commit
self.run_on_commit.append((self.savepoint_ids[:], func))
else:
# no transaction in progress; execute immediately
func()
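    # Example usage (sketch): with this mixin on the connection class,
    # callers register hooks through the connection object; ``notify_user``
    # is a hypothetical callback.
    #
    #   from django.db import connection
    #   connection.on_commit(lambda: notify_user(user_id))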
def run_and_clear_commit_hooks(self):
self.validate_no_atomic_block()
try:
while self.run_on_commit:
sids, func = self.run_on_commit.pop(0)
func()
finally:
self.run_on_commit = []
def commit(self, *a, **kw):
super(TransactionHooksDatabaseWrapperMixin, self).commit(*a, **kw)
# Atomic has not had a chance yet to restore autocommit on this
# connection, so on databases that handle autocommit correctly, we need
# to wait to run the hooks until it calls set_autocommit(True)
if self.features.autocommits_when_autocommit_is_off:
self.run_and_clear_commit_hooks()
else:
self.run_commit_hooks_on_set_autocommit_on = True
def set_autocommit(self, autocommit):
super(TransactionHooksDatabaseWrapperMixin, self).set_autocommit(
autocommit)
if autocommit and self.run_commit_hooks_on_set_autocommit_on:
self.run_and_clear_commit_hooks()
self.run_commit_hooks_on_set_autocommit_on = False
def savepoint_rollback(self, sid, *a, **kw):
super(TransactionHooksDatabaseWrapperMixin, self).savepoint_rollback(
sid, *a, **kw)
# remove any callbacks registered while this savepoint was active
self.run_on_commit = list(filter(
lambda x: sid not in x[0], self.run_on_commit))
def rollback(self, *a, **kw):
super(TransactionHooksDatabaseWrapperMixin, self).rollback(*a, **kw)
self.run_on_commit = []
def connect(self, *a, **kw):
super(TransactionHooksDatabaseWrapperMixin, self).connect(*a, **kw)
self.run_on_commit = []
def close(self, *a, **kw):
super(TransactionHooksDatabaseWrapperMixin, self).close(*a, **kw)
        self.run_on_commit = []
views.py | from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
def index(request):
    return HttpResponse('TEST URL')
lib.rs | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Result,
errors::{ffx_bail, ffx_error},
ffx_core::ffx_plugin,
ffx_pdk_lib::groups::{ArtifactStore, ArtifactStoreEntry, ArtifactStoreGroup},
ffx_pdk_lib::lock::{Lock, LockArtifact, LockArtifactStore},
ffx_pdk_lib::spec::{Spec, SpecArtifactStore, SpecArtifactStoreKind},
ffx_pdk_update_args::UpdateCommand,
fuchsia_hyper::new_https_client,
fuchsia_pkg::MetaContents,
futures_lite::io::AsyncWriteExt,
hyper::body::HttpBody,
hyper::{body, StatusCode, Uri},
serde_json::{json, Map, Value},
serde_json5,
std::cmp::Ordering,
std::fs::{read_to_string, File, OpenOptions},
std::io::BufReader,
std::path::PathBuf,
};
// Outputs artifacts to a lock file based on a general specification.
//
// Updates the artifacts by matching the available artifacts in an
// artifact store against the constraints in a specification
// (artifact_spec.json).
// URL path to artifact_groups.json for the TUF artifact store.
const TUF_ARTIFACT_GROUPS_PATH: &str = "targets/artifact_groups.json";
#[ffx_plugin("ffx_pdk")]
pub async fn cmd_update(cmd: UpdateCommand) -> Result<()> {
let spec: Spec = read_to_string(cmd.spec_file.clone())
.map_err(|e| ffx_error!(r#"Cannot open spec file "{}": {}"#, cmd.spec_file.display(), e))
.and_then(|contents| {
serde_json5::from_str(&contents).map_err(|e| {
ffx_error!(r#"JSON5 error from spec file "{}": {}"#, cmd.spec_file.display(), e)
})
})?;
process_spec(&spec, &cmd).await?;
println!("Spec file for product \"{}\" processed.", spec.product);
Ok(())
}
/// Struct to hold a JSON Pointer as specified in [RFC
/// 6901](https://tools.ietf.org/html/rfc6901) and a $min/$max boolean.
///
/// This struct is used for filtering artifact store by $min/$max.
///
struct MinMaxPointer {
pointer: String,
is_min: bool,
}
impl std::fmt::Debug for MinMaxPointer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "({}, {})", self.pointer, if self.is_min { "$min" } else { "$max" })
}
}
/// Returns a MinMaxPointer containing a JSON Pointer and "$min" or "$max" value.
///
/// No more than one $min or $max is allowed, so check and return errors.
///
fn get_min_max_pointer(json_object: &Map<String, Value>) -> Result<Option<MinMaxPointer>> {
let mut r = collect_min_max_pointers(json_object, "".to_string());
match r.len() {
0 => Ok(None),
1 => Ok(Some(r.remove(0))),
_ => ffx_bail!("More than one $min/$max found while processing spec file! {:?}", r),
}
}
/// Recursively collect JSON Pointers for keys containing the string
/// value "$min" or "$max" in the spec attributes.
///
/// JSON Pointers are used to look up values from a Value::Object for
/// filtering artifact store entries.
///
/// Return a vec of MinMaxPointer structs and the caller checks that no
/// more than 1 struct is returned.
///
fn collect_min_max_pointers(json_object: &Map<String, Value>, path: String) -> Vec<MinMaxPointer> {
    // Collect all matches so the caller can catch the error of more than one sort key.
let mut result = Vec::<MinMaxPointer>::new();
for (key, value) in json_object.iter() {
match value {
Value::String(s) => {
if s == "$min" || s == "$max" {
result.push(MinMaxPointer {
pointer: format!("{}/{}", path, key),
is_min: s == "$min",
})
}
}
Value::Object(o) => {
result.append(&mut collect_min_max_pointers(o, format!("{}/{}", path, key)));
}
Value::Null | Value::Bool(_) | Value::Number(_) | Value::Array(_) => {}
}
}
result
}
/// Compare two Value::Object types using a JSON Pointer to extract the
/// comparison field.
///
/// Since this function is used for ordering comparisons (e.g. by `max_by`)
/// it returns `Option<Ordering>`. Panics if either comparison field is
/// missing or the field is not a number or string.
///
fn value_object_partial_cmp(
a_object: &Value,
b_object: &Value,
pointer: &String,
) -> Option<Ordering> {
// values must be available, otherwise fatal error
let a: &Value = a_object
.pointer(pointer)
.unwrap_or_else(|| panic!("Missing field '{}' during $min/$max", pointer));
let b: &Value = b_object
.pointer(pointer)
.unwrap_or_else(|| panic!("Missing field '{}' during $min/$max", pointer));
match (a, b) {
(Value::Number(na), Value::Number(nb)) => {
na.as_f64().unwrap().partial_cmp(&nb.as_f64().unwrap())
}
(Value::String(sa), Value::String(sb)) => sa.partial_cmp(sb),
(_, _) => panic!("$min/$max field ({}) is not Number or String: {} {}", pointer, a, b),
}
}
/// Find the $min, $max and return the index.
///
fn find_min_max(
artifact_groups: &Vec<ArtifactStoreGroup>,
matches: &Vec<usize>,
attributes: &Map<String, Value>,
) -> Result<usize> {
// The next statement returns Err() when more than 1 $min/$max is present
let min_max_pointer = get_min_max_pointer(attributes)?;
match min_max_pointer {
None => {
if artifact_groups.len() > 1 {
ffx_bail!("Multiple artifact groups (probably missing $min/$max)");
}
Ok(0)
}
Some(p) => Ok(*matches
.iter()
.max_by(|&a, &b| {
let a_attributes = &artifact_groups[*a].attributes;
let b_attributes = &artifact_groups[*b].attributes;
value_object_partial_cmp(a_attributes, b_attributes, &p.pointer)
.map(|ordering| if p.is_min { ordering.reverse() } else { ordering })
.unwrap()
})
.unwrap()),
}
}
/// Returns the artifact store entry with the given name from a group.
///
fn get_artifact(
artifact_store_group: &ArtifactStoreGroup,
name: &str,
) -> Option<ArtifactStoreEntry> {
artifact_store_group.artifacts.iter().find(|&a| a.name == name).and_then(|a| Some(a.clone()))
}
/// Return artifact_groups.json for different kinds of artifact stores.
///
async fn read_artifact_groups(
store: &SpecArtifactStore,
cmd: &UpdateCommand,
) -> Result<ArtifactStore> {
match store.r#type {
SpecArtifactStoreKind::TUF => {
if store.repo.is_none() {
ffx_bail!("Missing repo field in artifact store")
}
let repo = store.repo.as_ref().unwrap();
let uri = format!("{}/{}", repo, TUF_ARTIFACT_GROUPS_PATH)
.parse::<Uri>()
.map_err(|e| ffx_error!(r#"Parse Uri failed for "{}": {}"#, repo, e))?;
let client = new_https_client();
let response = client
.get(uri.clone())
.await
.map_err(|e| ffx_error!(r#"Failed on http get for "{}": {}"#, uri, e))?;
if response.status() != StatusCode::OK {
ffx_bail!("http get error {} {}. \n", &uri, response.status(),);
}
let bytes = body::to_bytes(response.into_body()).await?;
let body = String::from_utf8(bytes.to_vec()).expect("response was not valid utf-8");
Ok(serde_json::from_str(&body)
.map_err(|e| ffx_error!(r#"Cannot parse json from "{}": {}"#, &uri, e))?)
}
SpecArtifactStoreKind::Local => {
if store.path.is_none() {
ffx_bail!("Missing path field in store kind");
}
let path_suffix = store.path.as_ref().unwrap();
if cmd.artifact_root.is_none() {
ffx_bail!("Missing --artifact-root parameter");
}
let path = format!("{}/{}", cmd.artifact_root.as_ref().unwrap(), path_suffix);
let reader = BufReader::new(
File::open(path.clone())
.map_err(|e| ffx_error!(r#"Cannot open "{}": {}"#, &path, e))?,
);
Ok(serde_json::from_reader(reader)
.map_err(|e| ffx_error!(r#"Cannot parse json from "{}": {}"#, &path, e))?)
}
}
}
/// Recursively match the artifact group attributes against the specification pattern.
///
/// True if a match.
///
fn match_object(group_attributes: &Value, spec_pattern: &Map<String, Value>) -> bool {
if !group_attributes.is_object() {
panic!("match_object: not an object.");
}
for (key, spec_value) in spec_pattern.iter() {
if let Some(group_value) = group_attributes.get(key) {
// Do not compare $min/$max spec values
if *spec_value != json!("$min") && *spec_value != json!("$max") {
if group_value.is_object() && spec_value.is_object() {
// Compare Object types recursively
if !match_object(group_value, spec_value.as_object().unwrap()) {
return false;
}
} else if *group_value != *spec_value {
// Compare Bool, Number, String, Array
return false;
};
}
} else {
            // The group has no value for this spec key, probably a user error
println!("Missing value during match for key \"{}\"", key);
return false;
}
}
true
}
/// Match artifact groups from the artifact store file against the spec attribute pattern.
///
/// Returns the index of the matching group.
///
fn match_artifacts(
artifact_groups: &Vec<ArtifactStoreGroup>,
spec_attribute_pattern: &Map<String, Value>,
) -> Result<usize> {
let mut matches = Vec::<usize>::new();
for (index, artifact_group) in artifact_groups.iter().enumerate() {
if match_object(&artifact_group.attributes, spec_attribute_pattern) {
matches.push(index);
}
}
let index = find_min_max(&artifact_groups, &matches, &spec_attribute_pattern)?;
Ok(index)
}
/// Merge two Option<Map> and return a new map. Entries are cloned.
///
/// Note: a duplicate key in b overwrites the value from a.
///
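/// For example (a sketch): merging `Some({"a": 1})` with
/// `Some({"a": 2, "b": 3})` yields `{"a": 2, "b": 3}`.
///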
fn merge(a: &Option<Map<String, Value>>, b: &Option<Map<String, Value>>) -> Map<String, Value> {
let mut result = Map::new();
if let Some(map) = a {
result.extend(map.into_iter().map(|(k, v)| (k.clone(), v.clone())));
}
if let Some(map) = b {
result.extend(map.into_iter().map(|(k, v)| (k.clone(), v.clone())));
}
result
}
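/// Collects the blob hashes for a package: the meta.far hash itself plus
/// every hash listed in its `meta/contents`. The meta.far is fetched over
/// HTTP when a content address storage host is given, and read from the
/// local artifact root otherwise.
///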
async fn get_blobs(
content_address_storage: Option<String>,
hash: String,
artifact_root: Option<String>,
) -> Result<Vec<String>> {
let tempdir = tempfile::tempdir().unwrap();
let mut result = vec![hash.clone()];
let meta_far_path = if content_address_storage.is_none() {
PathBuf::from(artifact_root.unwrap()).join(hash.to_string())
} else {
let hostname = content_address_storage.unwrap();
        let uri = format!("{}/{}", hostname, hash)
            .parse::<Uri>()
            .map_err(|e| ffx_error!(r#"Parse Uri failed for "{}": {}"#, hostname, e))?;
        let client = new_https_client();
        let mut res = client
            .get(uri.clone())
.await
.map_err(|e| ffx_error!(r#"Failed on http get for "{}": {}"#, uri, e))?;
let status = res.status();
if status != StatusCode::OK {
ffx_bail!("Cannot download meta.far. Status is {}. Uri is: {}.", status, &uri);
}
let meta_far_path = tempdir.path().join("meta.far");
let mut output = async_fs::File::create(&meta_far_path).await?;
while let Some(next) = res.data().await {
let chunk = next?;
output.write_all(&chunk).await?;
}
output.sync_all().await?;
meta_far_path
};
let mut archive = File::open(&meta_far_path)
.map_err(|e| ffx_error!(r#"Cannot open meta_far "{}": {}"#, meta_far_path.display(), e))?;
let mut meta_far = fuchsia_archive::Reader::new(&mut archive).map_err(|e| {
ffx_error!(r#"Cannot read fuchsia_archive "{}": {}"#, meta_far_path.display(), e)
})?;
let meta_contents = meta_far.read_file("meta/contents").map_err(|e| {
ffx_error!(r#"Cannot read "meta/contens" from "{}": {}"#, meta_far_path.display(), e)
})?;
let meta_contents = MetaContents::deserialize(meta_contents.as_slice())?.into_contents();
result.extend(meta_contents.into_iter().map(|(_, hash)| hash.to_string()));
return Ok(result);
}
/// Main processing of a spec file
///
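/// For each artifact group in the spec, this reads the artifact store,
/// matches its groups against the merged spec attributes, and records the
/// matching artifacts (with their blob hashes) in the output lock file.
///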
async fn process_spec(spec: &Spec, cmd: &UpdateCommand) -> Result<()> {
let mut lock_artifacts = Vec::<LockArtifact>::new();
for spec_artifact_group in spec.artifact_groups.iter() {
// SpecArtifactGroup has a store and list of artifacts
let spec_artifact_store = &spec_artifact_group.artifact_store;
let artifact_store_groups = read_artifact_groups(&spec_artifact_store, cmd).await?;
// find each artifact in the spec in the store
for spec_artifact in spec_artifact_group.artifacts.iter() {
let name = &spec_artifact.name;
// Merge attributes from group and spec
let attributes = merge(&spec.attributes, &spec_artifact_group.attributes);
// Select the single group that matches
let groups = &artifact_store_groups.artifact_groups;
let matching_index: usize = match_artifacts(groups, &attributes)?;
let matching_group = &groups[matching_index];
let artifact_store_group_entry =
                get_artifact(matching_group, name).expect("missing artifact");
let artifact_output = LockArtifact {
name: name.to_owned(),
r#type: artifact_store_group_entry.r#type,
artifact_store: LockArtifactStore {
name: spec_artifact_store.name.to_string(),
artifact_group_name: matching_group.name.to_string(),
r#type: spec_artifact_store.r#type.clone(),
repo: spec_artifact_store.repo.clone(),
content_address_storage: matching_group.content_address_storage.clone(),
},
attributes: matching_group.attributes.as_object().unwrap().clone(),
// todo: rename to hash
merkle: artifact_store_group_entry.hash.clone(),
blobs: get_blobs(
matching_group.content_address_storage.clone(),
artifact_store_group_entry.hash,
cmd.artifact_root.clone(),
)
.await?,
};
lock_artifacts.push(artifact_output);
}
}
let lock = Lock { artifacts: lock_artifacts };
let file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(cmd.out.clone())
.map_err(|e| ffx_error!(r#"Cannot create lock file "{}": {}"#, cmd.out.display(), e))?;
// write file
serde_json::to_writer_pretty(&file, &lock)?;
Ok(())
}
// tests
#[cfg(test)]
mod test {
use super::*;
use fuchsia_async as fasync;
use fuchsia_pkg::MetaPackage;
use fuchsia_pkg::{build_with_file_system, CreationManifest, FileSystem};
use maplit::{btreemap, hashmap};
use pkg::{
manager::RepositoryManager, server::RepositoryServer,
test_utils::make_writable_empty_repository,
};
use serde_json::json;
use serde_json5;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fs;
use std::io;
use std::io::Write;
use std::net::Ipv4Addr;
use std::path::PathBuf;
use std::sync::Arc;
/// Test artifact hash
#[test]
fn test_get_hash() {
// Test data in json5 format for cleaner look
let data = r#"
{
name: "1361ee2a-e384-4eda-9f25-694affdeb30e",
content_address_storage: "fuchsia-blobs.googleusercontent.com",
type: "tuf",
attributes: {version: "63"},
artifacts: [
{ name: "one", merkle: "hash_1", sha256: "2", type: "package" },
{ name: "two", merkle: "hash_2", sha256: "3", type: "package" },
],
}"#;
// Parse the test data
let v: ArtifactStoreGroup = serde_json5::from_str(data).unwrap();
assert_eq!(get_artifact(&v, "one").unwrap().hash, "hash_1");
}
// For testing comparisons
impl PartialEq for MinMaxPointer {
fn eq(&self, other: &MinMaxPointer) -> bool {
self.is_min == other.is_min && self.pointer == other.pointer
}
}
#[test]
fn test_get_min_max_pointer() {
let object = json!({
"name": "John",
"age": {
"human": "$max",
"dog": 49,
}
});
let ptr = get_min_max_pointer(&object.as_object().unwrap());
        // A Result containing an Option containing a MinMaxPointer
assert_eq!(
ptr.unwrap().unwrap(),
MinMaxPointer { pointer: "/age/human".to_string(), is_min: false }
)
}
// Tests the filtering of artifact store groups by $min/$max
//
#[test]
fn test_find_min_max() {
let store: ArtifactStore = serde_json::from_str(
r#"
{
"schema_version": "v1",
"artifact_groups": [
{
"artifacts": [ ],
"attributes": {
"creation_time": "2021-09-06T11:37:36.054280"
},
"name": "group_a"
}, {
"artifacts": [ ],
"attributes": {
"creation_time": "2021-09-06T11:37:36.054281"
},
"name": "group_b"
}
]
}"#,
)
.unwrap();
assert_eq!(store.artifact_groups.len(), 2);
// The spec attributes for the $min case
let json_min = json!({
"creation_time": "$min"
});
// Convert to Map<String,Value> instead of Value.
let spec_attributes_min = json_min.as_object().unwrap();
let matches: Vec<usize> = (0..store.artifact_groups.len()).collect();
assert_eq!(
find_min_max(&store.artifact_groups, &matches, &spec_attributes_min).unwrap(),
0
);
// max
let json_max = json!({
"creation_time": "$max"
});
let spec_attributes_max = json_max.as_object().unwrap();
assert_eq!(
find_min_max(&store.artifact_groups, &matches, &spec_attributes_max).unwrap(),
1
);
}
// Test match_object cases
// - ignores $min/$max fields
// - fails on top level object
// - fails on recursive object
#[test]
fn test_match_object() {
let spec_json = json!({"a": "$max", "b": 1, "c": {"d": true}});
let spec = spec_json.as_object().unwrap();
let group_1 = json!({"a": 1, "b": 1, "c": {"d": true}});
assert!(match_object(&group_1, &spec));
let group_2 = json!({"a": 1, "b": 2, "c": {"d": true}});
assert!(!match_object(&group_2, &spec));
let group_3 = json!({"a": 1, "b": 1, "c": {"d": false}});
assert!(!match_object(&group_3, &spec));
let group_4 = json!({"a": 1, "c": {"d": false}});
assert!(!match_object(&group_4, &spec));
}
#[test]
fn test_value_object_partial_cmp() {
let a = json!({"w": {"x": 1}});
let b = json!({"w": {"x": 2}});
let ordering = value_object_partial_cmp(&a, &b, &"/w/x".to_string());
assert_eq!(ordering, Some(Ordering::Less));
}
struct FakeFileSystem {
content_map: HashMap<String, Vec<u8>>,
}
impl<'a> FileSystem<'a> for FakeFileSystem {
type File = &'a [u8];
fn open(&'a self, path: &str) -> Result<Self::File, io::Error> {
Ok(self.content_map.get(path).unwrap().as_slice())
}
fn len(&self, path: &str) -> Result<u64, io::Error> {
Ok(self.content_map.get(path).unwrap().len() as u64)
}
fn read(&self, path: &str) -> Result<Vec<u8>, io::Error> {
Ok(self.content_map.get(path).unwrap().clone())
}
}
fn create_meta_far(path: PathBuf) {
let creation_manifest = CreationManifest::from_external_and_far_contents(
btreemap! {
"lib/mylib.so".to_string() => "host/mylib.so".to_string()
},
btreemap! {
"meta/my_component.cmx".to_string() => "host/my_component.cmx".to_string(),
"meta/package".to_string() => "host/meta/package".to_string()
},
)
.unwrap();
let component_manifest_contents = "my_component.cmx contents";
let mut v = vec![];
let meta_package = MetaPackage::from_name("my-package-name".parse().unwrap());
meta_package.serialize(&mut v).unwrap();
let file_system = FakeFileSystem {
content_map: hashmap! {
"host/mylib.so".to_string() => Vec::new(),
"host/my_component.cmx".to_string() => component_manifest_contents.as_bytes().to_vec(),
"host/meta/package".to_string() => v
},
};
build_with_file_system(&creation_manifest, &path, "my-package-name", &file_system).unwrap();
}
fn write_file(path: PathBuf, body: &[u8]) {
let mut tmp = tempfile::NamedTempFile::new().unwrap();
tmp.write_all(body).unwrap();
tmp.persist(path).unwrap();
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_end_to_end_local() {
let tempdir = tempfile::tempdir().unwrap();
let root = tempdir.path();
let out_filename = root.join("artifact_lock.json");
// recreate the test_data directory
for (filename, data) in [
("artifact_spec.json", include_str!("../test_data/artifact_spec.json")),
("artifact_groups.json", include_str!("../test_data/artifact_groups.json")),
("artifact_groups2.json", include_str!("../test_data/artifact_groups2.json")),
] {
fs::write(root.join(filename), data).expect("Unable to write file");
}
let meta_far_path =
root.join("0000000000000000000000000000000000000000000000000000000000000000");
create_meta_far(meta_far_path);
let blob_path =
root.join("15ec7bf0b50732b49f8228e07d24365338f9e3ab994b00af08e5a3bffe55fd8b");
write_file(blob_path, "".as_bytes());
let cmd = UpdateCommand {
spec_file: PathBuf::from(root.join("artifact_spec.json")),
out: out_filename.clone(),
artifact_root: Some(root.display().to_string()),
};
let r = cmd_update(cmd).await;
assert!(r.is_ok());
let new_artifact_lock: Lock = File::open(&out_filename)
.map(BufReader::new)
.map(serde_json::from_reader)
.unwrap()
.unwrap();
let golden_artifact_lock: Lock =
serde_json::from_str(include_str!("../test_data/golden_artifact_lock.json")).unwrap();
assert_eq!(new_artifact_lock, golden_artifact_lock);
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_end_to_end_tuf() {
let manager = RepositoryManager::new();
let tempdir = tempfile::tempdir().unwrap();
let root = tempdir.path().join("artifact-store");
let repo =
make_writable_empty_repository("artifact-store", root.clone().try_into().unwrap())
.await
.unwrap();
let out_filename = tempdir.path().join("artifact_lock.json");
let meta_far_path = root
.join("repository")
.join("0000000000000000000000000000000000000000000000000000000000000000");
create_meta_far(meta_far_path);
let blob_path = root
.join("repository")
.join("15ec7bf0b50732b49f8228e07d24365338f9e3ab994b00af08e5a3bffe55fd8b");
write_file(blob_path, "".as_bytes());
manager.add(Arc::new(repo));
let addr = (Ipv4Addr::LOCALHOST, 0).into();
let (server_fut, _, server) =
RepositoryServer::builder(addr, Arc::clone(&manager)).start().await.unwrap();
// Run the server in the background.
let task = fasync::Task::local(server_fut);
let tuf_repo_url = server.local_url() + "/artifact-store";
// write artifact_groups.json to server.
let tuf_dir = root.join("repository").join("targets/");
fs::create_dir(&tuf_dir).unwrap();
let artifact_group_path = tuf_dir.join("artifact_groups.json");
fs::write(
artifact_group_path,
include_str!("../test_data/tuf_artifact_groups.json")
.replace("tuf_repo_url", &tuf_repo_url),
)
.unwrap();
// write spec file.
let spec_file_path = tempdir.path().join("artifact_spec.json");
fs::write(
&spec_file_path,
include_str!("../test_data/tuf_artifact_spec.json")
.replace("tuf_repo_url", &tuf_repo_url),
)
.unwrap();
let cmd = UpdateCommand {
spec_file: spec_file_path,
out: out_filename.clone(),
artifact_root: None,
};
cmd_update(cmd).await.unwrap();
let new_artifact_lock: Lock = File::open(&out_filename)
.map(BufReader::new)
.map(serde_json::from_reader)
.unwrap()
.unwrap();
let golden_artifact_lock: Lock = serde_json::from_str(
include_str!("../test_data/golden_tuf_artifact_lock.json")
.replace("tuf_repo_url", &tuf_repo_url)
.as_str(),
)
.unwrap();
assert_eq!(new_artifact_lock, golden_artifact_lock);
// Signal the server to shutdown.
server.stop();
// Wait for the server to actually shut down.
task.await;
    }
}
a_bit_of_everything.go | package server
import (
"context"
"fmt"
"io"
"strings"
"sync"
"github.com/golang/glog"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes/duration"
"github.com/golang/protobuf/ptypes/empty"
examples "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb"
"github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub"
"github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2"
"github.com/rogpeppe/fastuuid"
"google.golang.org/genproto/googleapis/rpc/errdetails"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
)
// Implementation of ABitOfEverythingServiceServer
var uuidgen = fastuuid.MustNewGenerator()
type _ABitOfEverythingServer struct {
v map[string]*examples.ABitOfEverything
m sync.Mutex
}
type ABitOfEverythingServer interface {
examples.ABitOfEverythingServiceServer
examples.StreamServiceServer
}
// EDIT @moul
func NewHandler() ABitOfEverythingServer {
return newABitOfEverythingServer()
}
// END OF EDIT @moul
func newABitOfEverythingServer() ABitOfEverythingServer {
return &_ABitOfEverythingServer{
v: make(map[string]*examples.ABitOfEverything),
}
}
func (s *_ABitOfEverythingServer) Create(ctx context.Context, msg *examples.ABitOfEverything) (*examples.ABitOfEverything, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
var uuid string
for {
uuid = fmt.Sprintf("%x", uuidgen.Next())
if _, ok := s.v[uuid]; !ok {
break
}
}
s.v[uuid] = msg
s.v[uuid].Uuid = uuid
glog.Infof("%v", s.v[uuid])
return s.v[uuid], nil
}
func (s *_ABitOfEverythingServer) CreateBody(ctx context.Context, msg *examples.ABitOfEverything) (*examples.ABitOfEverything, error) {
return s.Create(ctx, msg)
}
func (s *_ABitOfEverythingServer) BulkCreate(stream examples.StreamService_BulkCreateServer) error {
count := 0
ctx := stream.Context()
for {
msg, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
return err
}
count++
glog.Error(msg)
if _, err = s.Create(ctx, msg); err != nil {
return err
}
}
err := stream.SendHeader(metadata.New(map[string]string{
"count": fmt.Sprintf("%d", count),
}))
if err != nil {
		return err
}
stream.SetTrailer(metadata.New(map[string]string{
"foo": "foo2",
"bar": "bar2",
}))
return stream.SendAndClose(new(empty.Empty))
}
func (s *_ABitOfEverythingServer) Lookup(ctx context.Context, msg *sub2.IdMessage) (*examples.ABitOfEverything, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
err := grpc.SendHeader(ctx, metadata.New(map[string]string{
"uuid": msg.Uuid,
}))
if err != nil {
return nil, err
}
if a, ok := s.v[msg.Uuid]; ok {
return a, nil
}
grpc.SetTrailer(ctx, metadata.New(map[string]string{
"foo": "foo2",
"bar": "bar2",
}))
return nil, status.Errorf(codes.NotFound, "not found")
}
func (s *_ABitOfEverythingServer) List(_ *empty.Empty, stream examples.StreamService_ListServer) error {
s.m.Lock()
defer s.m.Unlock()
err := stream.SendHeader(metadata.New(map[string]string{
"count": fmt.Sprintf("%d", len(s.v)),
}))
if err != nil {
		return err
}
for _, msg := range s.v {
if err := stream.Send(msg); err != nil {
return err
}
}
// return error when metadata includes error header
if header, ok := metadata.FromIncomingContext(stream.Context()); ok {
if v, ok := header["error"]; ok {
stream.SetTrailer(metadata.New(map[string]string{
"foo": "foo2",
"bar": "bar2",
}))
return status.Errorf(codes.InvalidArgument, "error metadata: %v", v)
}
}
return nil
}
func (s *_ABitOfEverythingServer) Update(ctx context.Context, msg *examples.ABitOfEverything) (*empty.Empty, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
if _, ok := s.v[msg.Uuid]; ok {
s.v[msg.Uuid] = msg
} else {
return nil, status.Errorf(codes.NotFound, "not found")
}
return new(empty.Empty), nil
}
func (s *_ABitOfEverythingServer) UpdateV2(ctx context.Context, msg *examples.UpdateV2Request) (*empty.Empty, error) {
glog.Info(msg)
// If there is no update mask do a regular update
if msg.UpdateMask == nil || len(msg.UpdateMask.GetPaths()) == 0 {
return s.Update(ctx, msg.Abe)
}
s.m.Lock()
defer s.m.Unlock()
if a, ok := s.v[msg.Abe.Uuid]; ok {
applyFieldMask(a, msg.Abe, msg.UpdateMask)
} else {
return nil, status.Errorf(codes.NotFound, "not found")
}
return new(empty.Empty), nil
}
// PatchWithFieldMaskInBody differs from UpdateV2 only in that this method exposes the field mask in the request body,
// so that clients can specify their mask explicitly
func (s *_ABitOfEverythingServer) PatchWithFieldMaskInBody(ctx context.Context, request *examples.UpdateV2Request) (*empty.Empty, error) {
	// low-effort attempt to modify the field mask to only include paths for the ABE struct. Since this is only for the
	// integration tests, this narrow implementation is fine.
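	// For example, an incoming mask path "Abe.uuid" is shifted to "uuid"
	// before being handed to UpdateV2.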
if request.UpdateMask != nil {
var shifted []string
for _, path := range request.UpdateMask.GetPaths() {
shifted = append(shifted, strings.TrimPrefix(path, "Abe."))
}
request.UpdateMask.Paths = shifted
}
return s.UpdateV2(ctx, request)
}
func (s *_ABitOfEverythingServer) Delete(ctx context.Context, msg *sub2.IdMessage) (*empty.Empty, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
if _, ok := s.v[msg.Uuid]; ok {
delete(s.v, msg.Uuid)
} else {
return nil, status.Errorf(codes.NotFound, "not found")
}
return new(empty.Empty), nil
}
func (s *_ABitOfEverythingServer) GetQuery(ctx context.Context, msg *examples.ABitOfEverything) (*empty.Empty, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
if _, ok := s.v[msg.Uuid]; ok {
s.v[msg.Uuid] = msg
} else {
return nil, status.Errorf(codes.NotFound, "not found")
}
return new(empty.Empty), nil
}
func (s *_ABitOfEverythingServer) GetRepeatedQuery(ctx context.Context, msg *examples.ABitOfEverythingRepeated) (*examples.ABitOfEverythingRepeated, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
return msg, nil
}
func (s *_ABitOfEverythingServer) Echo(ctx context.Context, msg *sub.StringMessage) (*sub.StringMessage, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
return msg, nil
}
func (s *_ABitOfEverythingServer) BulkEcho(stream examples.StreamService_BulkEchoServer) error {
var msgs []*sub.StringMessage
for {
msg, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
return err
}
msgs = append(msgs, msg)
}
hmd := metadata.New(map[string]string{
"foo": "foo1",
"bar": "bar1",
})
if err := stream.SendHeader(hmd); err != nil {
return err
}
for _, msg := range msgs {
glog.Info(msg)
if err := stream.Send(msg); err != nil {
return err
}
}
stream.SetTrailer(metadata.New(map[string]string{
"foo": "foo2",
"bar": "bar2",
}))
return nil
}
func (s *_ABitOfEverythingServer) DeepPathEcho(ctx context.Context, msg *examples.ABitOfEverything) (*examples.ABitOfEverything, error) {
s.m.Lock()
defer s.m.Unlock()
glog.Info(msg)
return msg, nil
}
func (s *_ABitOfEverythingServer) NoBindings(ctx context.Context, msg *duration.Duration) (*empty.Empty, error) {
return nil, nil
}
func (s *_ABitOfEverythingServer) Timeout(ctx context.Context, msg *empty.Empty) (*empty.Empty, error) {
select {
case <-ctx.Done():
return nil, ctx.Err()
}
}
func (s *_ABitOfEverythingServer) ErrorWithDetails(ctx context.Context, msg *empty.Empty) (*empty.Empty, error) {
stat, err := status.New(codes.Unknown, "with details").
WithDetails(proto.Message(
&errdetails.DebugInfo{
StackEntries: []string{"foo:1"},
Detail: "error debug details",
},
))
if err != nil {
return nil, status.Errorf(codes.Internal, "unexpected error adding details: %s", err)
}
return nil, stat.Err()
}
func (s *_ABitOfEverythingServer) GetMessageWithBody(ctx context.Context, msg *examples.MessageWithBody) (*empty.Empty, error) {
return &empty.Empty{}, nil
}
func (s *_ABitOfEverythingServer) PostWithEmptyBody(ctx context.Context, msg *examples.Body) (*empty.Empty, error) {
return &empty.Empty{}, nil
}
func (s *_ABitOfEverythingServer) CheckGetQueryParams(ctx context.Context, msg *examples.ABitOfEverything) (*examples.ABitOfEverything, error) {
return msg, nil
}
func (s *_ABitOfEverythingServer) CheckPostQueryParams(ctx context.Context, msg *examples.ABitOfEverything) (*examples.ABitOfEverything, error) {
return msg, nil
}
| newABitOfEverythingServer |