diff --git "a/data/go/data.json" "b/data/go/data.json" new file mode 100644--- /dev/null +++ "b/data/go/data.json" @@ -0,0 +1,100 @@ +{"size":996,"ext":"go","lang":"Go","max_stars_count":1369.0,"content":"package members\n\nimport \"github.com\/gophercloud\/gophercloud\"\n\nfunc imageMembersURL(c *gophercloud.ServiceClient, imageID string) string {\n\treturn c.ServiceURL(\"images\", imageID, \"members\")\n}\n\nfunc listMembersURL(c *gophercloud.ServiceClient, imageID string) string {\n\treturn imageMembersURL(c, imageID)\n}\n\nfunc createMemberURL(c *gophercloud.ServiceClient, imageID string) string {\n\treturn imageMembersURL(c, imageID)\n}\n\nfunc imageMemberURL(c *gophercloud.ServiceClient, imageID string, memberID string) string {\n\treturn c.ServiceURL(\"images\", imageID, \"members\", memberID)\n}\n\nfunc getMemberURL(c *gophercloud.ServiceClient, imageID string, memberID string) string {\n\treturn imageMemberURL(c, imageID, memberID)\n}\n\nfunc updateMemberURL(c *gophercloud.ServiceClient, imageID string, memberID string) string {\n\treturn imageMemberURL(c, imageID, memberID)\n}\n\nfunc deleteMemberURL(c *gophercloud.ServiceClient, imageID string, memberID string) string {\n\treturn imageMemberURL(c, imageID, memberID)\n}\n","avg_line_length":31.125,"max_line_length":92,"alphanum_fraction":0.7941767068} +{"size":11308,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***\n\/\/ *** Do not edit by hand unless you're certain you know what you are doing! ***\n\npackage environment\n\nimport (\n\t\"context\"\n\t\"reflect\"\n\n\t\"github.com\/pkg\/errors\"\n\t\"github.com\/pulumi\/pulumi\/sdk\/v3\/go\/pulumi\"\n)\n\ntype Certifi struct {\n\tpulumi.CustomResourceState\n\n\t\/\/ The PEM encoded certificate\n\tCertificate pulumi.StringOutput `pulumi:\"certificate\"`\n\t\/\/ The PEM encoded certificate chain\n\tCertificateChain pulumi.StringPtrOutput `pulumi:\"certificateChain\"`\n\t\/\/ The expiration date\n\tExpirationDate pulumi.StringOutput `pulumi:\"expirationDate\"`\n\t\/\/ The file name\n\tFileName pulumi.StringOutput `pulumi:\"fileName\"`\n\t\/\/ The issuer\n\tIssuer pulumi.StringOutput `pulumi:\"issuer\"`\n\t\/\/ The node name\n\tNodeName pulumi.StringOutput `pulumi:\"nodeName\"`\n\t\/\/ Whether to overwrite an existing certificate\n\tOverwrite pulumi.BoolPtrOutput `pulumi:\"overwrite\"`\n\t\/\/ The PEM encoded private key\n\tPrivateKey pulumi.StringOutput `pulumi:\"privateKey\"`\n\t\/\/ The public key size\n\tPublicKeySize pulumi.IntOutput `pulumi:\"publicKeySize\"`\n\t\/\/ The public key type\n\tPublicKeyType pulumi.StringOutput `pulumi:\"publicKeyType\"`\n\t\/\/ The SSL fingerprint\n\tSslFingerprint pulumi.StringOutput `pulumi:\"sslFingerprint\"`\n\t\/\/ The start date\n\tStartDate pulumi.StringOutput `pulumi:\"startDate\"`\n\t\/\/ The subject\n\tSubject pulumi.StringOutput `pulumi:\"subject\"`\n\t\/\/ The subject alternative names\n\tSubjectAlternativeNames pulumi.StringArrayOutput `pulumi:\"subjectAlternativeNames\"`\n}\n\n\/\/ NewCertifi registers a new resource with the given unique name, arguments, and options.\nfunc NewCertifi(ctx *pulumi.Context,\n\tname string, args *CertifiArgs, opts ...pulumi.ResourceOption) (*Certifi, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.Certificate == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Certificate'\")\n\t}\n\tif args.NodeName == nil 
{\n\t\treturn nil, errors.New(\"invalid value for required argument 'NodeName'\")\n\t}\n\tif args.PrivateKey == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'PrivateKey'\")\n\t}\n\tvar resource Certifi\n\terr := ctx.RegisterResource(\"proxmoxve:Environment\/certifi:Certifi\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}\n\n\/\/ GetCertifi gets an existing Certifi resource's state with the given name, ID, and optional\n\/\/ state properties that are used to uniquely qualify the lookup (nil if not required).\nfunc GetCertifi(ctx *pulumi.Context,\n\tname string, id pulumi.IDInput, state *CertifiState, opts ...pulumi.ResourceOption) (*Certifi, error) {\n\tvar resource Certifi\n\terr := ctx.ReadResource(\"proxmoxve:Environment\/certifi:Certifi\", name, id, state, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}\n\n\/\/ Input properties used for looking up and filtering Certifi resources.\ntype certifiState struct {\n\t\/\/ The PEM encoded certificate\n\tCertificate *string `pulumi:\"certificate\"`\n\t\/\/ The PEM encoded certificate chain\n\tCertificateChain *string `pulumi:\"certificateChain\"`\n\t\/\/ The expiration date\n\tExpirationDate *string `pulumi:\"expirationDate\"`\n\t\/\/ The file name\n\tFileName *string `pulumi:\"fileName\"`\n\t\/\/ The issuer\n\tIssuer *string `pulumi:\"issuer\"`\n\t\/\/ The node name\n\tNodeName *string `pulumi:\"nodeName\"`\n\t\/\/ Whether to overwrite an existing certificate\n\tOverwrite *bool `pulumi:\"overwrite\"`\n\t\/\/ The PEM encoded private key\n\tPrivateKey *string `pulumi:\"privateKey\"`\n\t\/\/ The public key size\n\tPublicKeySize *int `pulumi:\"publicKeySize\"`\n\t\/\/ The public key type\n\tPublicKeyType *string `pulumi:\"publicKeyType\"`\n\t\/\/ The SSL fingerprint\n\tSslFingerprint *string `pulumi:\"sslFingerprint\"`\n\t\/\/ The start date\n\tStartDate *string `pulumi:\"startDate\"`\n\t\/\/ The subject\n\tSubject *string `pulumi:\"subject\"`\n\t\/\/ The subject alternative names\n\tSubjectAlternativeNames []string `pulumi:\"subjectAlternativeNames\"`\n}\n\ntype CertifiState struct {\n\t\/\/ The PEM encoded certificate\n\tCertificate pulumi.StringPtrInput\n\t\/\/ The PEM encoded certificate chain\n\tCertificateChain pulumi.StringPtrInput\n\t\/\/ The expiration date\n\tExpirationDate pulumi.StringPtrInput\n\t\/\/ The file name\n\tFileName pulumi.StringPtrInput\n\t\/\/ The issuer\n\tIssuer pulumi.StringPtrInput\n\t\/\/ The node name\n\tNodeName pulumi.StringPtrInput\n\t\/\/ Whether to overwrite an existing certificate\n\tOverwrite pulumi.BoolPtrInput\n\t\/\/ The PEM encoded private key\n\tPrivateKey pulumi.StringPtrInput\n\t\/\/ The public key size\n\tPublicKeySize pulumi.IntPtrInput\n\t\/\/ The public key type\n\tPublicKeyType pulumi.StringPtrInput\n\t\/\/ The SSL fingerprint\n\tSslFingerprint pulumi.StringPtrInput\n\t\/\/ The start date\n\tStartDate pulumi.StringPtrInput\n\t\/\/ The subject\n\tSubject pulumi.StringPtrInput\n\t\/\/ The subject alternative names\n\tSubjectAlternativeNames pulumi.StringArrayInput\n}\n\nfunc (CertifiState) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*certifiState)(nil)).Elem()\n}\n\ntype certifiArgs struct {\n\t\/\/ The PEM encoded certificate\n\tCertificate string `pulumi:\"certificate\"`\n\t\/\/ The PEM encoded certificate chain\n\tCertificateChain *string `pulumi:\"certificateChain\"`\n\t\/\/ The node name\n\tNodeName string `pulumi:\"nodeName\"`\n\t\/\/ Whether to overwrite 
an existing certificate\n\tOverwrite *bool `pulumi:\"overwrite\"`\n\t\/\/ The PEM encoded private key\n\tPrivateKey string `pulumi:\"privateKey\"`\n}\n\n\/\/ The set of arguments for constructing a Certifi resource.\ntype CertifiArgs struct {\n\t\/\/ The PEM encoded certificate\n\tCertificate pulumi.StringInput\n\t\/\/ The PEM encoded certificate chain\n\tCertificateChain pulumi.StringPtrInput\n\t\/\/ The node name\n\tNodeName pulumi.StringInput\n\t\/\/ Whether to overwrite an existing certificate\n\tOverwrite pulumi.BoolPtrInput\n\t\/\/ The PEM encoded private key\n\tPrivateKey pulumi.StringInput\n}\n\nfunc (CertifiArgs) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*certifiArgs)(nil)).Elem()\n}\n\ntype CertifiInput interface {\n\tpulumi.Input\n\n\tToCertifiOutput() CertifiOutput\n\tToCertifiOutputWithContext(ctx context.Context) CertifiOutput\n}\n\nfunc (*Certifi) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*Certifi)(nil))\n}\n\nfunc (i *Certifi) ToCertifiOutput() CertifiOutput {\n\treturn i.ToCertifiOutputWithContext(context.Background())\n}\n\nfunc (i *Certifi) ToCertifiOutputWithContext(ctx context.Context) CertifiOutput {\n\treturn pulumi.ToOutputWithContext(ctx, i).(CertifiOutput)\n}\n\nfunc (i *Certifi) ToCertifiPtrOutput() CertifiPtrOutput {\n\treturn i.ToCertifiPtrOutputWithContext(context.Background())\n}\n\nfunc (i *Certifi) ToCertifiPtrOutputWithContext(ctx context.Context) CertifiPtrOutput {\n\treturn pulumi.ToOutputWithContext(ctx, i).(CertifiPtrOutput)\n}\n\ntype CertifiPtrInput interface {\n\tpulumi.Input\n\n\tToCertifiPtrOutput() CertifiPtrOutput\n\tToCertifiPtrOutputWithContext(ctx context.Context) CertifiPtrOutput\n}\n\ntype certifiPtrType CertifiArgs\n\nfunc (*certifiPtrType) ElementType() reflect.Type {\n\treturn reflect.TypeOf((**Certifi)(nil))\n}\n\nfunc (i *certifiPtrType) ToCertifiPtrOutput() CertifiPtrOutput {\n\treturn i.ToCertifiPtrOutputWithContext(context.Background())\n}\n\nfunc (i *certifiPtrType) ToCertifiPtrOutputWithContext(ctx context.Context) CertifiPtrOutput {\n\treturn pulumi.ToOutputWithContext(ctx, i).(CertifiPtrOutput)\n}\n\n\/\/ CertifiArrayInput is an input type that accepts CertifiArray and CertifiArrayOutput values.\n\/\/ You can construct a concrete instance of `CertifiArrayInput` via:\n\/\/\n\/\/ CertifiArray{ CertifiArgs{...} }\ntype CertifiArrayInput interface {\n\tpulumi.Input\n\n\tToCertifiArrayOutput() CertifiArrayOutput\n\tToCertifiArrayOutputWithContext(context.Context) CertifiArrayOutput\n}\n\ntype CertifiArray []CertifiInput\n\nfunc (CertifiArray) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*[]*Certifi)(nil)).Elem()\n}\n\nfunc (i CertifiArray) ToCertifiArrayOutput() CertifiArrayOutput {\n\treturn i.ToCertifiArrayOutputWithContext(context.Background())\n}\n\nfunc (i CertifiArray) ToCertifiArrayOutputWithContext(ctx context.Context) CertifiArrayOutput {\n\treturn pulumi.ToOutputWithContext(ctx, i).(CertifiArrayOutput)\n}\n\n\/\/ CertifiMapInput is an input type that accepts CertifiMap and CertifiMapOutput values.\n\/\/ You can construct a concrete instance of `CertifiMapInput` via:\n\/\/\n\/\/ CertifiMap{ \"key\": CertifiArgs{...} }\ntype CertifiMapInput interface {\n\tpulumi.Input\n\n\tToCertifiMapOutput() CertifiMapOutput\n\tToCertifiMapOutputWithContext(context.Context) CertifiMapOutput\n}\n\ntype CertifiMap map[string]CertifiInput\n\nfunc (CertifiMap) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*map[string]*Certifi)(nil)).Elem()\n}\n\nfunc (i CertifiMap) ToCertifiMapOutput() 
CertifiMapOutput {\n\treturn i.ToCertifiMapOutputWithContext(context.Background())\n}\n\nfunc (i CertifiMap) ToCertifiMapOutputWithContext(ctx context.Context) CertifiMapOutput {\n\treturn pulumi.ToOutputWithContext(ctx, i).(CertifiMapOutput)\n}\n\ntype CertifiOutput struct{ *pulumi.OutputState }\n\nfunc (CertifiOutput) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*Certifi)(nil))\n}\n\nfunc (o CertifiOutput) ToCertifiOutput() CertifiOutput {\n\treturn o\n}\n\nfunc (o CertifiOutput) ToCertifiOutputWithContext(ctx context.Context) CertifiOutput {\n\treturn o\n}\n\nfunc (o CertifiOutput) ToCertifiPtrOutput() CertifiPtrOutput {\n\treturn o.ToCertifiPtrOutputWithContext(context.Background())\n}\n\nfunc (o CertifiOutput) ToCertifiPtrOutputWithContext(ctx context.Context) CertifiPtrOutput {\n\treturn o.ApplyTWithContext(ctx, func(_ context.Context, v Certifi) *Certifi {\n\t\treturn &v\n\t}).(CertifiPtrOutput)\n}\n\ntype CertifiPtrOutput struct{ *pulumi.OutputState }\n\nfunc (CertifiPtrOutput) ElementType() reflect.Type {\n\treturn reflect.TypeOf((**Certifi)(nil))\n}\n\nfunc (o CertifiPtrOutput) ToCertifiPtrOutput() CertifiPtrOutput {\n\treturn o\n}\n\nfunc (o CertifiPtrOutput) ToCertifiPtrOutputWithContext(ctx context.Context) CertifiPtrOutput {\n\treturn o\n}\n\nfunc (o CertifiPtrOutput) Elem() CertifiOutput {\n\treturn o.ApplyT(func(v *Certifi) Certifi {\n\t\tif v != nil {\n\t\t\treturn *v\n\t\t}\n\t\tvar ret Certifi\n\t\treturn ret\n\t}).(CertifiOutput)\n}\n\ntype CertifiArrayOutput struct{ *pulumi.OutputState }\n\nfunc (CertifiArrayOutput) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*[]Certifi)(nil))\n}\n\nfunc (o CertifiArrayOutput) ToCertifiArrayOutput() CertifiArrayOutput {\n\treturn o\n}\n\nfunc (o CertifiArrayOutput) ToCertifiArrayOutputWithContext(ctx context.Context) CertifiArrayOutput {\n\treturn o\n}\n\nfunc (o CertifiArrayOutput) Index(i pulumi.IntInput) CertifiOutput {\n\treturn pulumi.All(o, i).ApplyT(func(vs []interface{}) Certifi {\n\t\treturn vs[0].([]Certifi)[vs[1].(int)]\n\t}).(CertifiOutput)\n}\n\ntype CertifiMapOutput struct{ *pulumi.OutputState }\n\nfunc (CertifiMapOutput) ElementType() reflect.Type {\n\treturn reflect.TypeOf((*map[string]Certifi)(nil))\n}\n\nfunc (o CertifiMapOutput) ToCertifiMapOutput() CertifiMapOutput {\n\treturn o\n}\n\nfunc (o CertifiMapOutput) ToCertifiMapOutputWithContext(ctx context.Context) CertifiMapOutput {\n\treturn o\n}\n\nfunc (o CertifiMapOutput) MapIndex(k pulumi.StringInput) CertifiOutput {\n\treturn pulumi.All(o, k).ApplyT(func(vs []interface{}) Certifi {\n\t\treturn vs[0].(map[string]Certifi)[vs[1].(string)]\n\t}).(CertifiOutput)\n}\n\nfunc init() {\n\tpulumi.RegisterOutputType(CertifiOutput{})\n\tpulumi.RegisterOutputType(CertifiPtrOutput{})\n\tpulumi.RegisterOutputType(CertifiArrayOutput{})\n\tpulumi.RegisterOutputType(CertifiMapOutput{})\n}\n","avg_line_length":30.3163538874,"max_line_length":104,"alphanum_fraction":0.7733463035} +{"size":1747,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"context\"\n\t\"flag\"\n\t\"fmt\"\n\t\"google.golang.org\/grpc\"\n\t\"google.golang.org\/grpc\/keepalive\"\n\tpb \"go-grpc-example\/2-simple_rpc\/proto\"\n\t\"log\"\n\t\"time\"\n)\n\n\/\/ Address \u8fde\u63a5\u5730\u5740\nconst Address string = \":8000\"\n\n\nvar addr = flag.String(\"addr\", \"localhost:50052\", \"the address to connect to\")\n\nvar kacp = keepalive.ClientParameters{\n\tTime: 10 * time.Second, \/\/ send pings every 10 seconds if there is no activity\n\tTimeout: 
time.Second, \/\/ wait 1 second for ping ack before considering the connection dead\n\tPermitWithoutStream: true, \/\/ send pings even without active streams\n}\n\nvar grpcClient pb.SimpleClient\n\nfunc main() {\n\t\/\/ Connect to the server\n\tconn, err := grpc.Dial(Address, grpc.WithInsecure(),grpc.WithKeepaliveParams(kacp))\n\tif err != nil {\n\t\tlog.Fatalf(\"net.Connect err: %v\", err)\n\t}\n\tdefer conn.Close()\n\n\t\/\/ Establish the gRPC connection\n\tgrpcClient = pb.NewSimpleClient(conn)\n\troute()\n\n\tc := pb.NewSimpleClient(conn)\n\n\tctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)\n\tdefer cancel()\n\tfmt.Println(\"Performing unary request\")\n\tfor {\n\t\ttime.Sleep(5*time.Second)\n\t\tres, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: \"keepalive demo\"})\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"unexpected error from UnaryEcho: %v\", err)\n\t\t}\n\t\tfmt.Println(\"RPC response:\", res)\n\t}\n\n\tselect {} \/\/ Block forever; run with GODEBUG=http2debug=2 to observe ping frames and GOAWAYs due to idleness.\n}\n\n\/\/ route calls the server-side Route method\nfunc route() {\n\t\/\/ Build the request struct\n\treq := pb.SimpleRequest{\n\t\tData: \"grpc\",\n\t}\n\t\/\/ Call our service (the Route method)\n\t\/\/ A context.Context is passed in as well, which lets us change the RPC's behavior when needed, e.g. time out or cancel a running RPC\n\tres, err := grpcClient.Route(context.Background(), &req)\n\tif err != nil {\n\t\tlog.Fatalf(\"Call Route err: %v\", err)\n\t}\n\t\/\/ Print the response\n\tlog.Println(res)\n}\n","avg_line_length":24.2638888889,"max_line_length":110,"alphanum_fraction":0.6828849456} +{"size":14706,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Code generated by protoc-gen-go-grpc. 
DO NOT EDIT.\n\npackage protocol\n\nimport (\n\tcontext \"context\"\n\tgrpc \"google.golang.org\/grpc\"\n\tcodes \"google.golang.org\/grpc\/codes\"\n\tstatus \"google.golang.org\/grpc\/status\"\n)\n\n\/\/ This is a compile-time assertion to ensure that this generated file\n\/\/ is compatible with the grpc package it is being compiled against.\n\/\/ Requires gRPC-Go v1.32.0 or later.\nconst _ = grpc.SupportPackageIsVersion7\n\n\/\/ RepoOwnersClient is the client API for RepoOwners service.\n\/\/\n\/\/ For semantics around ctx use and closing\/ending streaming RPCs, please refer to https:\/\/pkg.go.dev\/google.golang.org\/grpc\/?tab=doc#ClientConn.NewStream.\ntype RepoOwnersClient interface {\n\tFindApproverOwnersForFile(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Path, error)\n\tFindReviewersOwnersForFile(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Path, error)\n\tLeafApprovers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error)\n\tLeafReviewers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error)\n\tApprovers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error)\n\tReviewers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error)\n\tIsNoParentOwners(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*NoParentOwners, error)\n\tAllReviewers(ctx context.Context, in *Branch, opts ...grpc.CallOption) (*Owners, error)\n\tTopLevelApprovers(ctx context.Context, in *Branch, opts ...grpc.CallOption) (*Owners, error)\n}\n\ntype repoOwnersClient struct {\n\tcc grpc.ClientConnInterface\n}\n\nfunc NewRepoOwnersClient(cc grpc.ClientConnInterface) RepoOwnersClient {\n\treturn &repoOwnersClient{cc}\n}\n\nfunc (c *repoOwnersClient) FindApproverOwnersForFile(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Path, error) {\n\tout := new(Path)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/FindApproverOwnersForFile\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) FindReviewersOwnersForFile(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Path, error) {\n\tout := new(Path)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/FindReviewersOwnersForFile\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) LeafApprovers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/LeafApprovers\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) LeafReviewers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/LeafReviewers\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) Approvers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/Approvers\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) Reviewers(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/Reviewers\", in, 
out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) IsNoParentOwners(ctx context.Context, in *RepoFilePath, opts ...grpc.CallOption) (*NoParentOwners, error) {\n\tout := new(NoParentOwners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/IsNoParentOwners\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) AllReviewers(ctx context.Context, in *Branch, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/AllReviewers\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\nfunc (c *repoOwnersClient) TopLevelApprovers(ctx context.Context, in *Branch, opts ...grpc.CallOption) (*Owners, error) {\n\tout := new(Owners)\n\terr := c.cc.Invoke(ctx, \"\/repoOwners.RepoOwners\/TopLevelApprovers\", in, out, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out, nil\n}\n\n\/\/ RepoOwnersServer is the server API for RepoOwners service.\n\/\/ All implementations must embed UnimplementedRepoOwnersServer\n\/\/ for forward compatibility\ntype RepoOwnersServer interface {\n\tFindApproverOwnersForFile(context.Context, *RepoFilePath) (*Path, error)\n\tFindReviewersOwnersForFile(context.Context, *RepoFilePath) (*Path, error)\n\tLeafApprovers(context.Context, *RepoFilePath) (*Owners, error)\n\tLeafReviewers(context.Context, *RepoFilePath) (*Owners, error)\n\tApprovers(context.Context, *RepoFilePath) (*Owners, error)\n\tReviewers(context.Context, *RepoFilePath) (*Owners, error)\n\tIsNoParentOwners(context.Context, *RepoFilePath) (*NoParentOwners, error)\n\tAllReviewers(context.Context, *Branch) (*Owners, error)\n\tTopLevelApprovers(context.Context, *Branch) (*Owners, error)\n\tmustEmbedUnimplementedRepoOwnersServer()\n}\n\n\/\/ UnimplementedRepoOwnersServer must be embedded to have forward compatible implementations.\ntype UnimplementedRepoOwnersServer struct {\n}\n\nfunc (UnimplementedRepoOwnersServer) FindApproverOwnersForFile(context.Context, *RepoFilePath) (*Path, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method FindApproverOwnersForFile not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) FindReviewersOwnersForFile(context.Context, *RepoFilePath) (*Path, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method FindReviewersOwnersForFile not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) LeafApprovers(context.Context, *RepoFilePath) (*Owners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method LeafApprovers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) LeafReviewers(context.Context, *RepoFilePath) (*Owners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method LeafReviewers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) Approvers(context.Context, *RepoFilePath) (*Owners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method Approvers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) Reviewers(context.Context, *RepoFilePath) (*Owners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method Reviewers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) IsNoParentOwners(context.Context, *RepoFilePath) (*NoParentOwners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method IsNoParentOwners not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) AllReviewers(context.Context, *Branch) (*Owners, error) {\n\treturn 
nil, status.Errorf(codes.Unimplemented, \"method AllReviewers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) TopLevelApprovers(context.Context, *Branch) (*Owners, error) {\n\treturn nil, status.Errorf(codes.Unimplemented, \"method TopLevelApprovers not implemented\")\n}\nfunc (UnimplementedRepoOwnersServer) mustEmbedUnimplementedRepoOwnersServer() {}\n\n\/\/ UnsafeRepoOwnersServer may be embedded to opt out of forward compatibility for this service.\n\/\/ Use of this interface is not recommended, as added methods to RepoOwnersServer will\n\/\/ result in compilation errors.\ntype UnsafeRepoOwnersServer interface {\n\tmustEmbedUnimplementedRepoOwnersServer()\n}\n\nfunc RegisterRepoOwnersServer(s grpc.ServiceRegistrar, srv RepoOwnersServer) {\n\ts.RegisterService(&RepoOwners_ServiceDesc, srv)\n}\n\nfunc _RepoOwners_FindApproverOwnersForFile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).FindApproverOwnersForFile(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/FindApproverOwnersForFile\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).FindApproverOwnersForFile(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_FindReviewersOwnersForFile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).FindReviewersOwnersForFile(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/FindReviewersOwnersForFile\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).FindReviewersOwnersForFile(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_LeafApprovers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).LeafApprovers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/LeafApprovers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).LeafApprovers(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_LeafReviewers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).LeafReviewers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/LeafReviewers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).LeafReviewers(ctx, 
req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_Approvers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).Approvers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/Approvers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).Approvers(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_Reviewers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).Reviewers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/Reviewers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).Reviewers(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_IsNoParentOwners_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(RepoFilePath)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).IsNoParentOwners(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/IsNoParentOwners\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).IsNoParentOwners(ctx, req.(*RepoFilePath))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_AllReviewers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(Branch)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).AllReviewers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/AllReviewers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).AllReviewers(ctx, req.(*Branch))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\nfunc _RepoOwners_TopLevelApprovers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\tin := new(Branch)\n\tif err := dec(in); err != nil {\n\t\treturn nil, err\n\t}\n\tif interceptor == nil {\n\t\treturn srv.(RepoOwnersServer).TopLevelApprovers(ctx, in)\n\t}\n\tinfo := &grpc.UnaryServerInfo{\n\t\tServer: srv,\n\t\tFullMethod: \"\/repoOwners.RepoOwners\/TopLevelApprovers\",\n\t}\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\treturn srv.(RepoOwnersServer).TopLevelApprovers(ctx, req.(*Branch))\n\t}\n\treturn interceptor(ctx, in, info, handler)\n}\n\n\/\/ RepoOwners_ServiceDesc is the grpc.ServiceDesc for RepoOwners service.\n\/\/ It's only intended for direct use 
with grpc.RegisterService,\n\/\/ and not to be introspected or modified (even as a copy)\nvar RepoOwners_ServiceDesc = grpc.ServiceDesc{\n\tServiceName: \"repoOwners.RepoOwners\",\n\tHandlerType: (*RepoOwnersServer)(nil),\n\tMethods: []grpc.MethodDesc{\n\t\t{\n\t\t\tMethodName: \"FindApproverOwnersForFile\",\n\t\t\tHandler: _RepoOwners_FindApproverOwnersForFile_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"FindReviewersOwnersForFile\",\n\t\t\tHandler: _RepoOwners_FindReviewersOwnersForFile_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"LeafApprovers\",\n\t\t\tHandler: _RepoOwners_LeafApprovers_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"LeafReviewers\",\n\t\t\tHandler: _RepoOwners_LeafReviewers_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"Approvers\",\n\t\t\tHandler: _RepoOwners_Approvers_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"Reviewers\",\n\t\t\tHandler: _RepoOwners_Reviewers_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"IsNoParentOwners\",\n\t\t\tHandler: _RepoOwners_IsNoParentOwners_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"AllReviewers\",\n\t\t\tHandler: _RepoOwners_AllReviewers_Handler,\n\t\t},\n\t\t{\n\t\t\tMethodName: \"TopLevelApprovers\",\n\t\t\tHandler: _RepoOwners_TopLevelApprovers_Handler,\n\t\t},\n\t},\n\tStreams: []grpc.StreamDesc{},\n\tMetadata: \"repo_owners.proto\",\n}\n","avg_line_length":37.7076923077,"max_line_length":182,"alphanum_fraction":0.742622059} +{"size":9642,"ext":"go","lang":"Go","max_stars_count":6.0,"content":"package validation\n\nimport (\n\t\"testing\"\n\n\tuserapi \"github.com\/openshift\/origin\/pkg\/user\/apis\/user\"\n\tmetav1 \"k8s.io\/apimachinery\/pkg\/apis\/meta\/v1\"\n\tkapi \"k8s.io\/kubernetes\/pkg\/api\"\n)\n\nfunc TestValidateGroup(t *testing.T) {\n\tvalidObj := func() *userapi.Group {\n\t\treturn &userapi.Group{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myname\",\n\t\t\t},\n\t\t\tUsers: []string{\"myuser\"},\n\t\t}\n\t}\n\n\tif errs := ValidateGroup(validObj()); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\temptyUser := validObj()\n\temptyUser.Users = []string{\"\"}\n\tif errs := ValidateGroup(emptyUser); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidUser := validObj()\n\tinvalidUser.Users = []string{\"bad:user:name\"}\n\tif errs := ValidateGroup(invalidUser); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidName := validObj()\n\tinvalidName.Name = \"bad:group:name\"\n\tif errs := ValidateGroup(invalidName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateGroupUpdate(t *testing.T) {\n\tvalidObj := func() *userapi.Group {\n\t\treturn &userapi.Group{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myname\",\n\t\t\t\tResourceVersion: \"1\",\n\t\t\t},\n\t\t\tUsers: []string{\"myuser\"},\n\t\t}\n\t}\n\n\toldObj := validObj()\n\n\tif errs := ValidateGroupUpdate(validObj(), oldObj); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\temptyUser := validObj()\n\temptyUser.Users = []string{\"\"}\n\tif errs := ValidateGroupUpdate(emptyUser, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidUser := validObj()\n\tinvalidUser.Users = []string{\"bad:user:name\"}\n\tif errs := ValidateGroupUpdate(invalidUser, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidName := validObj()\n\tinvalidName.Name = \"bad:group:name\"\n\tif errs := ValidateGroupUpdate(invalidName, oldObj); len(errs) == 0 
{\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateUser(t *testing.T) {\n\tvalidObj := func() *userapi.User {\n\t\treturn &userapi.User{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myname\",\n\t\t\t},\n\t\t\tIdentities: []string{\"myprovider:mylogin\"},\n\t\t\tGroups: []string{\"mygroup\"},\n\t\t}\n\t}\n\n\tif errs := ValidateUser(validObj()); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\temptyIdentity := validObj()\n\temptyIdentity.Identities = []string{\"\"}\n\tif errs := ValidateUser(emptyIdentity); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidIdentity := validObj()\n\tinvalidIdentity.Identities = []string{\"foo\"}\n\tif errs := ValidateUser(invalidIdentity); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyGroup := validObj()\n\temptyGroup.Groups = []string{\"\"}\n\tif errs := ValidateUser(emptyGroup); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidGroup := validObj()\n\tinvalidGroup.Groups = []string{\"bad:group:name\"}\n\tif errs := ValidateUser(invalidGroup); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateUserUpdate(t *testing.T) {\n\n\tvalidObj := func() *userapi.User {\n\t\treturn &userapi.User{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myname\",\n\t\t\t\tResourceVersion: \"1\",\n\t\t\t},\n\t\t\tIdentities: []string{\"myprovider:mylogin\"},\n\t\t\tGroups: []string{\"mygroup\"},\n\t\t}\n\t}\n\n\toldObj := validObj()\n\n\tif errs := ValidateUserUpdate(validObj(), oldObj); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\temptyIdentity := validObj()\n\temptyIdentity.Identities = []string{\"\"}\n\tif errs := ValidateUserUpdate(emptyIdentity, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidIdentity := validObj()\n\tinvalidIdentity.Identities = []string{\"foo\"}\n\tif errs := ValidateUserUpdate(invalidIdentity, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyGroup := validObj()\n\temptyGroup.Groups = []string{\"\"}\n\tif errs := ValidateUserUpdate(emptyGroup, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidGroup := validObj()\n\tinvalidGroup.Groups = []string{\"bad:group:name\"}\n\tif errs := ValidateUserUpdate(invalidGroup, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n}\n\nfunc TestValidateIdentity(t *testing.T) {\n\tvalidObj := func() *userapi.Identity {\n\t\treturn &userapi.Identity{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myprovider:myproviderusername\",\n\t\t\t},\n\t\t\tProviderName: \"myprovider\",\n\t\t\tProviderUserName: \"myproviderusername\",\n\t\t\tUser: kapi.ObjectReference{Name: \"myuser\", UID: \"myuseruid\"},\n\t\t}\n\t}\n\n\tif errs := ValidateIdentity(validObj()); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\tnoUserUID := validObj()\n\tnoUserUID.User.UID = \"\"\n\tif errs := ValidateIdentity(noUserUID); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyProvider := validObj()\n\temptyProvider.ProviderName = \"\"\n\tif errs := ValidateIdentity(emptyProvider); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidProvider := validObj()\n\tinvalidProvider.ProviderName = \"foo:bar\"\n\tif errs := ValidateIdentity(invalidProvider); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got 
none\")\n\t}\n\n\temptyProviderUserName := validObj()\n\temptyProviderUserName.ProviderUserName = \"\"\n\tif errs := ValidateIdentity(emptyProviderUserName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidProviderUserName := validObj()\n\tinvalidProviderUserName.ProviderUserName = \"user:name\"\n\tif errs := ValidateIdentity(invalidProviderUserName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tmismatchName := validObj()\n\tmismatchName.ProviderUserName = \"myproviderusername2\"\n\tif errs := ValidateIdentity(mismatchName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateIdentityUpdate(t *testing.T) {\n\tvalidObj := func() *userapi.Identity {\n\t\treturn &userapi.Identity{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myprovider:myproviderusername\",\n\t\t\t\tResourceVersion: \"1\",\n\t\t\t},\n\t\t\tProviderName: \"myprovider\",\n\t\t\tProviderUserName: \"myproviderusername\",\n\t\t\tUser: kapi.ObjectReference{Name: \"myuser\", UID: \"myuseruid\"},\n\t\t}\n\t}\n\n\toldObj := validObj()\n\n\tif errs := ValidateIdentityUpdate(validObj(), oldObj); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\tnoUserUID := validObj()\n\tnoUserUID.User.UID = \"\"\n\tif errs := ValidateIdentityUpdate(noUserUID, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyProvider := validObj()\n\temptyProvider.ProviderName = \"\"\n\tif errs := ValidateIdentityUpdate(emptyProvider, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidProvider := validObj()\n\tinvalidProvider.ProviderName = \"foo:bar\"\n\tif errs := ValidateIdentityUpdate(invalidProvider, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyProviderUserName := validObj()\n\temptyProviderUserName.ProviderUserName = \"\"\n\tif errs := ValidateIdentityUpdate(emptyProviderUserName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tinvalidProviderUserName := validObj()\n\tinvalidProviderUserName.ProviderUserName = \"user:name\"\n\tif errs := ValidateIdentityUpdate(invalidProviderUserName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\tmismatchName := validObj()\n\tmismatchName.ProviderUserName = \"myproviderusername2\"\n\tif errs := ValidateIdentityUpdate(mismatchName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateUserIdentityMapping(t *testing.T) {\n\tvalidObj := func() *userapi.UserIdentityMapping {\n\t\treturn &userapi.UserIdentityMapping{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myprovider:myproviderusername\",\n\t\t\t},\n\t\t\tIdentity: kapi.ObjectReference{Name: \"myprovider:myproviderusername\"},\n\t\t\tUser: kapi.ObjectReference{Name: \"myuser\"},\n\t\t}\n\t}\n\n\tif errs := ValidateUserIdentityMapping(validObj()); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\tmismatchName := validObj()\n\tmismatchName.Identity.Name = \"myprovider:myproviderusername2\"\n\tif errs := ValidateUserIdentityMapping(mismatchName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyIdentityName := validObj()\n\temptyIdentityName.Identity.Name = \"\"\n\tif errs := ValidateUserIdentityMapping(emptyIdentityName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyUserName := validObj()\n\temptyUserName.Identity.Name = \"\"\n\tif errs := 
ValidateUserIdentityMapping(emptyUserName); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n\nfunc TestValidateUserIdentityMappingUpdate(t *testing.T) {\n\tvalidObj := func() *userapi.UserIdentityMapping {\n\t\treturn &userapi.UserIdentityMapping{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: \"myprovider:myproviderusername\",\n\t\t\t\tResourceVersion: \"1\",\n\t\t\t},\n\t\t\tIdentity: kapi.ObjectReference{Name: \"myprovider:myproviderusername\"},\n\t\t\tUser: kapi.ObjectReference{Name: \"myuser\"},\n\t\t}\n\t}\n\n\toldObj := validObj()\n\n\tif errs := ValidateUserIdentityMappingUpdate(validObj(), oldObj); len(errs) > 0 {\n\t\tt.Errorf(\"Expected no errors, got %v\", errs)\n\t}\n\n\tmismatchName := validObj()\n\tmismatchName.Identity.Name = \"myprovider:myproviderusername2\"\n\tif errs := ValidateUserIdentityMappingUpdate(mismatchName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyIdentityName := validObj()\n\temptyIdentityName.Identity.Name = \"\"\n\tif errs := ValidateUserIdentityMappingUpdate(emptyIdentityName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n\n\temptyUserName := validObj()\n\temptyUserName.Identity.Name = \"\"\n\tif errs := ValidateUserIdentityMappingUpdate(emptyUserName, oldObj); len(errs) == 0 {\n\t\tt.Errorf(\"Expected error, got none\")\n\t}\n}\n","avg_line_length":28.0290697674,"max_line_length":90,"alphanum_fraction":0.6885500933} +{"size":3284,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Unless explicitly stated otherwise all files in this repository are licensed\n\/\/ under the Apache License Version 2.0.\n\/\/ This product includes software developed at Datadog (https:\/\/www.datadoghq.com\/).\n\/\/ Copyright 2016-present Datadog, Inc.\n\n\/\/go:build clusterchecks && kubeapiserver\n\/\/ +build clusterchecks,kubeapiserver\n\npackage providers\n\nimport (\n\t\"context\"\n\n\t\"github.com\/DataDog\/datadog-agent\/pkg\/autodiscovery\/integration\"\n\t\"github.com\/DataDog\/datadog-agent\/pkg\/autodiscovery\/providers\/names\"\n\t\"github.com\/DataDog\/datadog-agent\/pkg\/config\"\n\t\"github.com\/DataDog\/datadog-agent\/pkg\/util\/kubernetes\/apiserver\"\n)\n\n\/\/ KubeServiceFileConfigProvider generates cluster checks from check configurations defined in files.\ntype KubeServiceFileConfigProvider struct {\n}\n\n\/\/ NewKubeServiceFileConfigProvider returns a new KubeServiceFileConfigProvider\nfunc NewKubeServiceFileConfigProvider(*config.ConfigurationProviders) (ConfigProvider, error) {\n\treturn &KubeServiceFileConfigProvider{}, nil\n}\n\n\/\/ Collect returns the check configurations defined in Yaml files.\n\/\/ Only configs with advanced AD identifiers targeting kubernetes services are handled by this collector.\nfunc (c *KubeServiceFileConfigProvider) Collect(ctx context.Context) ([]integration.Config, error) {\n\tconfigs, _, err := ReadConfigFiles(WithAdvancedADOnly)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn toKubernetesServiceChecks(configs), nil\n}\n\n\/\/ IsUpToDate is not implemented for the file providers as the files are not meant to change.\nfunc (c *KubeServiceFileConfigProvider) IsUpToDate(ctx context.Context) (bool, error) {\n\treturn false, nil\n}\n\n\/\/ String returns a string representation of the KubeServiceFileConfigProvider.\nfunc (c *KubeServiceFileConfigProvider) String() string {\n\treturn names.KubeServicesFile\n}\n\n\/\/ GetConfigErrors is not implemented for the KubeServiceFileConfigProvider.\nfunc (c 
*KubeServiceFileConfigProvider) GetConfigErrors() map[string]ErrorMsgSet {\n\treturn make(map[string]ErrorMsgSet)\n}\n\n\/\/ toKubernetesServiceChecks generates integration configs to target\n\/\/ kubernetes services (cluster checks) based on advanced AD identifiers.\nfunc toKubernetesServiceChecks(configs []integration.Config) []integration.Config {\n\tk8sServiceChecks := []integration.Config{}\n\tfor i, config := range configs {\n\t\tif len(config.AdvancedADIdentifiers) > 0 {\n\t\t\tadIdentifiers := toServiceADIdentifiers(config.AdvancedADIdentifiers)\n\t\t\tif len(adIdentifiers) == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tconfigs[i].ADIdentifiers = adIdentifiers\n\t\t\tconfigs[i].AdvancedADIdentifiers = nil\n\t\t\tconfigs[i].Provider = names.KubeServicesFile\n\t\t\tconfigs[i].ClusterCheck = true\n\n\t\t\tk8sServiceChecks = append(k8sServiceChecks, configs[i])\n\t\t}\n\t}\n\n\treturn k8sServiceChecks\n}\n\n\/\/ toServiceADIdentifiers converts advanced AD identifiers into AD identifiers\nfunc toServiceADIdentifiers(advancedIDs []integration.AdvancedADIdentifier) []string {\n\tadIdentifiers := []string{}\n\tfor _, advancedID := range advancedIDs {\n\t\tif !advancedID.KubeService.IsEmpty() {\n\t\t\tadIdentifiers = append(adIdentifiers, apiserver.EntityForServiceWithNames(advancedID.KubeService.Namespace, advancedID.KubeService.Name))\n\t\t}\n\t}\n\n\treturn adIdentifiers\n}\n\nfunc init() {\n\tRegisterProvider(names.KubeServicesFileRegisterName, NewKubeServiceFileConfigProvider)\n}\n","avg_line_length":35.311827957,"max_line_length":140,"alphanum_fraction":0.7947624848} +{"size":501,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package main\n\n\/\/ DO NOT EDIT, this file is generated by hover at compile-time for the url_protocol plugin.\n\nimport (\n\turl_protocol \"github.com\/getcouragenow\/plugins\/links\/url_protocol\/go\"\n\tflutter \"github.com\/go-flutter-desktop\/go-flutter\"\n)\n\nfunc init() {\n\t\/\/ Only the init function can be tweaked by plugin maker.\n\toptions = append(options, flutter.AddPlugin(&url_protocol.UrlProtocolPlugin{\n\t\tScheme: \"gcn\",\n\t\tHost: \"\",\n\t\tURLTest: \"gcn:\/\/\/second?message=Hello%20using%20url%20Protocol\",\n\t}))\n}\n","avg_line_length":27.8333333333,"max_line_length":92,"alphanum_fraction":0.7385229541} +{"size":136542,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Package gmail provides access to the Gmail API.\n\/\/\n\/\/ See https:\/\/developers.google.com\/gmail\/api\/\n\/\/\n\/\/ Usage example:\n\/\/\n\/\/ import \"google.golang.org\/api\/gmail\/v1\"\n\/\/ ...\n\/\/ gmailService, err := gmail.New(oauthHttpClient)\npackage gmail\n\nimport (\n\t\"bytes\"\n\t\"encoding\/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"golang.org\/x\/net\/context\"\n\t\"google.golang.org\/api\/googleapi\"\n\t\"io\"\n\t\"net\/http\"\n\t\"net\/url\"\n\t\"strconv\"\n\t\"strings\"\n)\n\n\/\/ Always reference these packages, just in case the auto-generated code\n\/\/ below doesn't.\nvar _ = bytes.NewBuffer\nvar _ = strconv.Itoa\nvar _ = fmt.Sprintf\nvar _ = json.NewDecoder\nvar _ = io.Copy\nvar _ = url.Parse\nvar _ = googleapi.Version\nvar _ = errors.New\nvar _ = strings.Replace\nvar _ = context.Background\n\nconst apiId = \"gmail:v1\"\nconst apiName = \"gmail\"\nconst apiVersion = \"v1\"\nconst basePath = \"https:\/\/www.googleapis.com\/gmail\/v1\/users\/\"\n\n\/\/ OAuth2 scopes used by this API.\nconst (\n\t\/\/ View and manage your mail\n\tMailGoogleComScope = \"https:\/\/mail.google.com\/\"\n\n\t\/\/ Manage drafts and send 
emails\n\tGmailComposeScope = \"https:\/\/www.googleapis.com\/auth\/gmail.compose\"\n\n\t\/\/ Insert mail into your mailbox\n\tGmailInsertScope = \"https:\/\/www.googleapis.com\/auth\/gmail.insert\"\n\n\t\/\/ Manage mailbox labels\n\tGmailLabelsScope = \"https:\/\/www.googleapis.com\/auth\/gmail.labels\"\n\n\t\/\/ View and modify but not delete your email\n\tGmailModifyScope = \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\n\t\/\/ View your emails messages and settings\n\tGmailReadonlyScope = \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\n\t\/\/ Send email on your behalf\n\tGmailSendScope = \"https:\/\/www.googleapis.com\/auth\/gmail.send\"\n)\n\nfunc New(client *http.Client) (*Service, error) {\n\tif client == nil {\n\t\treturn nil, errors.New(\"client is nil\")\n\t}\n\ts := &Service{client: client, BasePath: basePath}\n\ts.Users = NewUsersService(s)\n\treturn s, nil\n}\n\ntype Service struct {\n\tclient *http.Client\n\tBasePath string \/\/ API endpoint base URL\n\tUserAgent string \/\/ optional additional User-Agent fragment\n\n\tUsers *UsersService\n}\n\nfunc (s *Service) userAgent() string {\n\tif s.UserAgent == \"\" {\n\t\treturn googleapi.UserAgent\n\t}\n\treturn googleapi.UserAgent + \" \" + s.UserAgent\n}\n\nfunc NewUsersService(s *Service) *UsersService {\n\trs := &UsersService{s: s}\n\trs.Drafts = NewUsersDraftsService(s)\n\trs.History = NewUsersHistoryService(s)\n\trs.Labels = NewUsersLabelsService(s)\n\trs.Messages = NewUsersMessagesService(s)\n\trs.Threads = NewUsersThreadsService(s)\n\treturn rs\n}\n\ntype UsersService struct {\n\ts *Service\n\n\tDrafts *UsersDraftsService\n\n\tHistory *UsersHistoryService\n\n\tLabels *UsersLabelsService\n\n\tMessages *UsersMessagesService\n\n\tThreads *UsersThreadsService\n}\n\nfunc NewUsersDraftsService(s *Service) *UsersDraftsService {\n\trs := &UsersDraftsService{s: s}\n\treturn rs\n}\n\ntype UsersDraftsService struct {\n\ts *Service\n}\n\nfunc NewUsersHistoryService(s *Service) *UsersHistoryService {\n\trs := &UsersHistoryService{s: s}\n\treturn rs\n}\n\ntype UsersHistoryService struct {\n\ts *Service\n}\n\nfunc NewUsersLabelsService(s *Service) *UsersLabelsService {\n\trs := &UsersLabelsService{s: s}\n\treturn rs\n}\n\ntype UsersLabelsService struct {\n\ts *Service\n}\n\nfunc NewUsersMessagesService(s *Service) *UsersMessagesService {\n\trs := &UsersMessagesService{s: s}\n\trs.Attachments = NewUsersMessagesAttachmentsService(s)\n\treturn rs\n}\n\ntype UsersMessagesService struct {\n\ts *Service\n\n\tAttachments *UsersMessagesAttachmentsService\n}\n\nfunc NewUsersMessagesAttachmentsService(s *Service) *UsersMessagesAttachmentsService {\n\trs := &UsersMessagesAttachmentsService{s: s}\n\treturn rs\n}\n\ntype UsersMessagesAttachmentsService struct {\n\ts *Service\n}\n\nfunc NewUsersThreadsService(s *Service) *UsersThreadsService {\n\trs := &UsersThreadsService{s: s}\n\treturn rs\n}\n\ntype UsersThreadsService struct {\n\ts *Service\n}\n\n\/\/ Draft: A draft email in the user's mailbox.\ntype Draft struct {\n\t\/\/ Id: The immutable ID of the draft.\n\tId string `json:\"id,omitempty\"`\n\n\t\/\/ Message: The message content of the draft.\n\tMessage *Message `json:\"message,omitempty\"`\n}\n\n\/\/ History: A record of a change to the user's mailbox. 
Each history\n\/\/ change may affect multiple messages in multiple ways.\ntype History struct {\n\t\/\/ Id: The mailbox sequence ID.\n\tId uint64 `json:\"id,omitempty,string\"`\n\n\t\/\/ LabelsAdded: Labels added to messages in this history record.\n\tLabelsAdded []*HistoryLabelAdded `json:\"labelsAdded,omitempty\"`\n\n\t\/\/ LabelsRemoved: Labels removed from messages in this history record.\n\tLabelsRemoved []*HistoryLabelRemoved `json:\"labelsRemoved,omitempty\"`\n\n\t\/\/ Messages: List of messages changed in this history record. The fields\n\t\/\/ for specific change types, such as messagesAdded may duplicate\n\t\/\/ messages in this field. We recommend using the specific change-type\n\t\/\/ fields instead of this.\n\tMessages []*Message `json:\"messages,omitempty\"`\n\n\t\/\/ MessagesAdded: Messages added to the mailbox in this history record.\n\tMessagesAdded []*HistoryMessageAdded `json:\"messagesAdded,omitempty\"`\n\n\t\/\/ MessagesDeleted: Messages deleted (not Trashed) from the mailbox in\n\t\/\/ this history record.\n\tMessagesDeleted []*HistoryMessageDeleted `json:\"messagesDeleted,omitempty\"`\n}\n\ntype HistoryLabelAdded struct {\n\t\/\/ LabelIds: Label IDs added to the message.\n\tLabelIds []string `json:\"labelIds,omitempty\"`\n\n\tMessage *Message `json:\"message,omitempty\"`\n}\n\ntype HistoryLabelRemoved struct {\n\t\/\/ LabelIds: Label IDs removed from the message.\n\tLabelIds []string `json:\"labelIds,omitempty\"`\n\n\tMessage *Message `json:\"message,omitempty\"`\n}\n\ntype HistoryMessageAdded struct {\n\tMessage *Message `json:\"message,omitempty\"`\n}\n\ntype HistoryMessageDeleted struct {\n\tMessage *Message `json:\"message,omitempty\"`\n}\n\n\/\/ Label: Labels are used to categorize messages and threads within the\n\/\/ user's mailbox.\ntype Label struct {\n\t\/\/ Id: The immutable ID of the label.\n\tId string `json:\"id,omitempty\"`\n\n\t\/\/ LabelListVisibility: The visibility of the label in the label list in\n\t\/\/ the Gmail web interface.\n\t\/\/\n\t\/\/ Possible values:\n\t\/\/ \"labelHide\"\n\t\/\/ \"labelShow\"\n\t\/\/ \"labelShowIfUnread\"\n\tLabelListVisibility string `json:\"labelListVisibility,omitempty\"`\n\n\t\/\/ MessageListVisibility: The visibility of the label in the message\n\t\/\/ list in the Gmail web interface.\n\t\/\/\n\t\/\/ Possible values:\n\t\/\/ \"hide\"\n\t\/\/ \"show\"\n\tMessageListVisibility string `json:\"messageListVisibility,omitempty\"`\n\n\t\/\/ MessagesTotal: The total number of messages with the label.\n\tMessagesTotal int64 `json:\"messagesTotal,omitempty\"`\n\n\t\/\/ MessagesUnread: The number of unread messages with the label.\n\tMessagesUnread int64 `json:\"messagesUnread,omitempty\"`\n\n\t\/\/ Name: The display name of the label.\n\tName string `json:\"name,omitempty\"`\n\n\t\/\/ ThreadsTotal: The total number of threads with the label.\n\tThreadsTotal int64 `json:\"threadsTotal,omitempty\"`\n\n\t\/\/ ThreadsUnread: The number of unread threads with the label.\n\tThreadsUnread int64 `json:\"threadsUnread,omitempty\"`\n\n\t\/\/ Type: The owner type for the label. User labels are created by the\n\t\/\/ user and can be modified and deleted by the user and can be applied\n\t\/\/ to any message or thread. System labels are internally created and\n\t\/\/ cannot be added, modified, or deleted. System labels may be able to\n\t\/\/ be applied to or removed from messages and threads under some\n\t\/\/ circumstances but this is not guaranteed. 
For example, users can\n\t\/\/ apply and remove the INBOX and UNREAD labels from messages and\n\t\/\/ threads, but cannot apply or remove the DRAFTS or SENT labels from\n\t\/\/ messages or threads.\n\t\/\/\n\t\/\/ Possible values:\n\t\/\/ \"system\"\n\t\/\/ \"user\"\n\tType string `json:\"type,omitempty\"`\n}\n\ntype ListDraftsResponse struct {\n\t\/\/ Drafts: List of drafts.\n\tDrafts []*Draft `json:\"drafts,omitempty\"`\n\n\t\/\/ NextPageToken: Token to retrieve the next page of results in the\n\t\/\/ list.\n\tNextPageToken string `json:\"nextPageToken,omitempty\"`\n\n\t\/\/ ResultSizeEstimate: Estimated total number of results.\n\tResultSizeEstimate int64 `json:\"resultSizeEstimate,omitempty\"`\n}\n\ntype ListHistoryResponse struct {\n\t\/\/ History: List of history records. Any messages contained in the\n\t\/\/ response will typically only have id and threadId fields populated.\n\tHistory []*History `json:\"history,omitempty\"`\n\n\t\/\/ HistoryId: The ID of the mailbox's current history record.\n\tHistoryId uint64 `json:\"historyId,omitempty,string\"`\n\n\t\/\/ NextPageToken: Page token to retrieve the next page of results in the\n\t\/\/ list.\n\tNextPageToken string `json:\"nextPageToken,omitempty\"`\n}\n\ntype ListLabelsResponse struct {\n\t\/\/ Labels: List of labels.\n\tLabels []*Label `json:\"labels,omitempty\"`\n}\n\ntype ListMessagesResponse struct {\n\t\/\/ Messages: List of messages.\n\tMessages []*Message `json:\"messages,omitempty\"`\n\n\t\/\/ NextPageToken: Token to retrieve the next page of results in the\n\t\/\/ list.\n\tNextPageToken string `json:\"nextPageToken,omitempty\"`\n\n\t\/\/ ResultSizeEstimate: Estimated total number of results.\n\tResultSizeEstimate int64 `json:\"resultSizeEstimate,omitempty\"`\n}\n\ntype ListThreadsResponse struct {\n\t\/\/ NextPageToken: Page token to retrieve the next page of results in the\n\t\/\/ list.\n\tNextPageToken string `json:\"nextPageToken,omitempty\"`\n\n\t\/\/ ResultSizeEstimate: Estimated total number of results.\n\tResultSizeEstimate int64 `json:\"resultSizeEstimate,omitempty\"`\n\n\t\/\/ Threads: List of threads.\n\tThreads []*Thread `json:\"threads,omitempty\"`\n}\n\n\/\/ Message: An email message.\ntype Message struct {\n\t\/\/ HistoryId: The ID of the last history record that modified this\n\t\/\/ message.\n\tHistoryId uint64 `json:\"historyId,omitempty,string\"`\n\n\t\/\/ Id: The immutable ID of the message.\n\tId string `json:\"id,omitempty\"`\n\n\t\/\/ InternalDate: The internal message creation timestamp (epoch ms),\n\t\/\/ which determines ordering in the inbox. For normal SMTP-received\n\t\/\/ email, this represents the time the message was originally accepted\n\t\/\/ by Google, which is more reliable than the Date header. However, for\n\t\/\/ API-migrated mail, it can be configured by client to be based on the\n\t\/\/ Date header.\n\tInternalDate int64 `json:\"internalDate,omitempty,string\"`\n\n\t\/\/ LabelIds: List of IDs of labels applied to this message.\n\tLabelIds []string `json:\"labelIds,omitempty\"`\n\n\t\/\/ Payload: The parsed email structure in the message parts.\n\tPayload *MessagePart `json:\"payload,omitempty\"`\n\n\t\/\/ Raw: The entire email message in an RFC 2822 formatted and base64url\n\t\/\/ encoded string. 
Returned in messages.get and drafts.get responses\n\t\/\/ when the format=RAW parameter is supplied.\n\tRaw string `json:\"raw,omitempty\"`\n\n\t\/\/ SizeEstimate: Estimated size in bytes of the message.\n\tSizeEstimate int64 `json:\"sizeEstimate,omitempty\"`\n\n\t\/\/ Snippet: A short part of the message text.\n\tSnippet string `json:\"snippet,omitempty\"`\n\n\t\/\/ ThreadId: The ID of the thread the message belongs to. To add a\n\t\/\/ message or draft to a thread, the following criteria must be met:\n\t\/\/ - The requested threadId must be specified on the Message or\n\t\/\/ Draft.Message you supply with your request.\n\t\/\/ - The References and In-Reply-To headers must be set in compliance\n\t\/\/ with the RFC 2822 standard.\n\t\/\/ - The Subject headers must match.\n\tThreadId string `json:\"threadId,omitempty\"`\n}\n\n\/\/ MessagePart: A single MIME message part.\ntype MessagePart struct {\n\t\/\/ Body: The message part body for this part, which may be empty for\n\t\/\/ container MIME message parts.\n\tBody *MessagePartBody `json:\"body,omitempty\"`\n\n\t\/\/ Filename: The filename of the attachment. Only present if this\n\t\/\/ message part represents an attachment.\n\tFilename string `json:\"filename,omitempty\"`\n\n\t\/\/ Headers: List of headers on this message part. For the top-level\n\t\/\/ message part, representing the entire message payload, it will\n\t\/\/ contain the standard RFC 2822 email headers such as To, From, and\n\t\/\/ Subject.\n\tHeaders []*MessagePartHeader `json:\"headers,omitempty\"`\n\n\t\/\/ MimeType: The MIME type of the message part.\n\tMimeType string `json:\"mimeType,omitempty\"`\n\n\t\/\/ PartId: The immutable ID of the message part.\n\tPartId string `json:\"partId,omitempty\"`\n\n\t\/\/ Parts: The child MIME message parts of this part. This only applies\n\t\/\/ to container MIME message parts, for example multipart\/*. For non-\n\t\/\/ container MIME message part types, such as text\/plain, this field is\n\t\/\/ empty. For more information, see RFC 1521.\n\tParts []*MessagePart `json:\"parts,omitempty\"`\n}\n\n\/\/ MessagePartBody: The body of a single MIME message part.\ntype MessagePartBody struct {\n\t\/\/ AttachmentId: When present, contains the ID of an external attachment\n\t\/\/ that can be retrieved in a separate messages.attachments.get request.\n\t\/\/ When not present, the entire content of the message part body is\n\t\/\/ contained in the data field.\n\tAttachmentId string `json:\"attachmentId,omitempty\"`\n\n\t\/\/ Data: The body data of a MIME message part. May be empty for MIME\n\t\/\/ container types that have no message body or when the body data is\n\t\/\/ sent as a separate attachment. An attachment ID is present if the\n\t\/\/ body data is contained in a separate attachment.\n\tData string `json:\"data,omitempty\"`\n\n\t\/\/ Size: Total number of bytes in the body of the message part.\n\tSize int64 `json:\"size,omitempty\"`\n}\n\ntype MessagePartHeader struct {\n\t\/\/ Name: The name of the header before the : separator. For example, To.\n\tName string `json:\"name,omitempty\"`\n\n\t\/\/ Value: The value of the header after the : separator. 
For example,\n\t\/\/ someuser@example.com.\n\tValue string `json:\"value,omitempty\"`\n}\n\ntype ModifyMessageRequest struct {\n\t\/\/ AddLabelIds: A list of IDs of labels to add to this message.\n\tAddLabelIds []string `json:\"addLabelIds,omitempty\"`\n\n\t\/\/ RemoveLabelIds: A list IDs of labels to remove from this message.\n\tRemoveLabelIds []string `json:\"removeLabelIds,omitempty\"`\n}\n\ntype ModifyThreadRequest struct {\n\t\/\/ AddLabelIds: A list of IDs of labels to add to this thread.\n\tAddLabelIds []string `json:\"addLabelIds,omitempty\"`\n\n\t\/\/ RemoveLabelIds: A list of IDs of labels to remove from this thread.\n\tRemoveLabelIds []string `json:\"removeLabelIds,omitempty\"`\n}\n\n\/\/ Profile: Profile for a Gmail user.\ntype Profile struct {\n\t\/\/ EmailAddress: The user's email address.\n\tEmailAddress string `json:\"emailAddress,omitempty\"`\n\n\t\/\/ HistoryId: The ID of the mailbox's current history record.\n\tHistoryId uint64 `json:\"historyId,omitempty,string\"`\n\n\t\/\/ MessagesTotal: The total number of messages in the mailbox.\n\tMessagesTotal int64 `json:\"messagesTotal,omitempty\"`\n\n\t\/\/ ThreadsTotal: The total number of threads in the mailbox.\n\tThreadsTotal int64 `json:\"threadsTotal,omitempty\"`\n}\n\n\/\/ Thread: A collection of messages representing a conversation.\ntype Thread struct {\n\t\/\/ HistoryId: The ID of the last history record that modified this\n\t\/\/ thread.\n\tHistoryId uint64 `json:\"historyId,omitempty,string\"`\n\n\t\/\/ Id: The unique ID of the thread.\n\tId string `json:\"id,omitempty\"`\n\n\t\/\/ Messages: The list of messages in the thread.\n\tMessages []*Message `json:\"messages,omitempty\"`\n\n\t\/\/ Snippet: A short part of the message text.\n\tSnippet string `json:\"snippet,omitempty\"`\n}\n\n\/\/ WatchRequest: Set up or update a new push notification watch on this\n\/\/ user's mailbox.\ntype WatchRequest struct {\n\t\/\/ LabelFilterAction: Filtering behavior of labelIds list specified.\n\t\/\/\n\t\/\/ Possible values:\n\t\/\/ \"exclude\"\n\t\/\/ \"include\"\n\tLabelFilterAction string `json:\"labelFilterAction,omitempty\"`\n\n\t\/\/ LabelIds: List of label_ids to restrict notifications about. By\n\t\/\/ default, if unspecified, all changes are pushed out. If specified\n\t\/\/ then dictates which labels are required for a push notification to be\n\t\/\/ generated.\n\tLabelIds []string `json:\"labelIds,omitempty\"`\n\n\t\/\/ TopicName: A fully qualified Google Cloud Pub\/Sub API topic name to\n\t\/\/ publish the events to. This topic name **must** already exist in\n\t\/\/ Cloud Pub\/Sub and you **must** have already granted gmail \"publish\"\n\t\/\/ permission on it. For example,\n\t\/\/ \"projects\/my-project-identifier\/topics\/my-topic-name\" (using the new\n\t\/\/ Cloud Pub\/Sub \"v1beta2\" topic naming format).\n\t\/\/\n\t\/\/ Note that the \"my-project-identifier\" portion must exactly match your\n\t\/\/ Google developer project id (the one executing this watch request).\n\tTopicName string `json:\"topicName,omitempty\"`\n}\n\n\/\/ WatchResponse: Push notification watch response.\ntype WatchResponse struct {\n\t\/\/ Expiration: When Gmail will stop sending notifications for mailbox\n\t\/\/ updates (epoch millis). 
Call watch again before this time to renew\n\t\/\/ the watch.\n\tExpiration int64 `json:\"expiration,omitempty,string\"`\n\n\t\/\/ HistoryId: The ID of the mailbox's current history record.\n\tHistoryId uint64 `json:\"historyId,omitempty,string\"`\n}\n\n\/\/ method id \"gmail.users.getProfile\":\n\ntype UsersGetProfileCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ GetProfile: Gets the current user's Gmail profile.\nfunc (r *UsersService) GetProfile(userId string) *UsersGetProfileCall {\n\tc := &UsersGetProfileCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersGetProfileCall) Fields(s ...googleapi.Field) *UsersGetProfileCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersGetProfileCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/profile\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersGetProfileCall) Do() (*Profile, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Profile\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the current user's Gmail profile.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.getProfile\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
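// Illustrative sketch (not generated code): one way client code might use the
// UsersGetProfileCall above. It assumes svc is a *Service wired up earlier in
// this file (so svc.Users is non-nil) and authorized with appropriate Gmail
// scopes; the function name is hypothetical.
func exampleGetProfile(svc *Service) (uint64, error) {
	profile, err := svc.Users.GetProfile("me").Do()
	if err != nil {
		return 0, err
	}
	// The profile's HistoryId is a convenient baseline for later incremental
	// syncs via Users.History.List.
	fmt.Printf("%s: %d messages, %d threads\n",
		profile.EmailAddress, profile.MessagesTotal, profile.ThreadsTotal)
	return profile.HistoryId, nil
}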
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/profile\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Profile\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.stop\":\n\ntype UsersStopCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ Stop: Stop receiving push notifications for the given user mailbox.\nfunc (r *UsersService) Stop(userId string) *UsersStopCall {\n\tc := &UsersStopCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersStopCall) Fields(s ...googleapi.Field) *UsersStopCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersStopCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/stop\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersStopCall) Do() error {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n\t\/\/ {\n\t\/\/ \"description\": \"Stop receiving push notifications for the given user mailbox.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.stop\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/stop\",\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.watch\":\n\ntype UsersWatchCall struct {\n\ts *Service\n\tuserId string\n\twatchrequest *WatchRequest\n\topt_ map[string]interface{}\n}\n\n\/\/ Watch: Set up or update a push notification watch on the given user\n\/\/ mailbox.\nfunc (r *UsersService) Watch(userId string, watchrequest *WatchRequest) *UsersWatchCall {\n\tc := &UsersWatchCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.watchrequest = watchrequest\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersWatchCall) Fields(s ...googleapi.Field) *UsersWatchCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersWatchCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.watchrequest)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/watch\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersWatchCall) Do() (*WatchResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *WatchResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Set up or update a push notification watch on the given user mailbox.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.watch\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
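// Illustrative sketch (not generated code): registering a push notification
// watch with the UsersWatchCall above. The Cloud Pub/Sub topic name below is
// the placeholder from the WatchRequest documentation; the topic must already
// exist and grant Gmail publish permission. Assumes svc.Users is wired up
// earlier in this file.
func exampleWatchInbox(svc *Service) (*WatchResponse, error) {
	req := &WatchRequest{
		TopicName:         "projects/my-project-identifier/topics/my-topic-name", // placeholder
		LabelIds:          []string{"INBOX"},
		LabelFilterAction: "include",
	}
	resp, err := svc.Users.Watch("me", req).Do()
	if err != nil {
		return nil, err
	}
	// resp.HistoryId can be stored for Users.History.List; the watch should be
	// renewed before resp.Expiration (epoch ms).
	return resp, nil
}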
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/watch\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"WatchRequest\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"WatchResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.create\":\n\ntype UsersDraftsCreateCall struct {\n\ts *Service\n\tuserId string\n\tdraft *Draft\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Create: Creates a new draft with the DRAFT label.\nfunc (r *UsersDraftsService) Create(userId string, draft *Draft) *UsersDraftsCreateCall {\n\tc := &UsersDraftsCreateCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.draft = draft\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersDraftsCreateCall) Media(r io.Reader) *UsersDraftsCreateCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersDraftsCreateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsCreateCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersDraftsCreateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsCreateCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsCreateCall) Fields(s ...googleapi.Field) *UsersDraftsCreateCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsCreateCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.draft)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", \"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar cancel func()\n\t\tcancel, _ = 
googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsCreateCall) Do() (*Draft, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Draft\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Creates a new draft with the DRAFT label.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.drafts.create\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/drafts\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/drafts\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.delete\":\n\ntype UsersDraftsDeleteCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Delete: Immediately and permanently deletes the specified draft. 
Does\n\/\/ not simply trash it.\nfunc (r *UsersDraftsService) Delete(userId string, id string) *UsersDraftsDeleteCall {\n\tc := &UsersDraftsDeleteCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsDeleteCall) Fields(s ...googleapi.Field) *UsersDraftsDeleteCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsDeleteCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"DELETE\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsDeleteCall) Do() error {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n\t\/\/ {\n\t\/\/ \"description\": \"Immediately and permanently deletes the specified draft. Does not simply trash it.\",\n\t\/\/ \"httpMethod\": \"DELETE\",\n\t\/\/ \"id\": \"gmail.users.drafts.delete\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the draft to delete.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
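// Illustrative sketch (not generated code): creating a draft with the
// UsersDraftsCreateCall above by supplying a base64url-encoded RFC 2822
// message in Message.Raw. Assumes encoding/base64 is imported and that the
// Draft type defined earlier in this file exposes a Message field.
func exampleCreateDraft(svc *Service, to, subject, bodyText string) (*Draft, error) {
	rfc822 := "To: " + to + "\r\n" +
		"Subject: " + subject + "\r\n" +
		"\r\n" +
		bodyText
	draft := &Draft{
		Message: &Message{
			Raw: base64.URLEncoding.EncodeToString([]byte(rfc822)),
		},
	}
	return svc.Users.Drafts.Create("me", draft).Do()
}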
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\/{id}\",\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.get\":\n\ntype UsersDraftsGetCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Get: Gets the specified draft.\nfunc (r *UsersDraftsService) Get(userId string, id string) *UsersDraftsGetCall {\n\tc := &UsersDraftsGetCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Format sets the optional parameter \"format\": The format to return the\n\/\/ draft in.\n\/\/\n\/\/ Possible values:\n\/\/ \"full\" (default)\n\/\/ \"metadata\"\n\/\/ \"minimal\"\n\/\/ \"raw\"\nfunc (c *UsersDraftsGetCall) Format(format string) *UsersDraftsGetCall {\n\tc.opt_[\"format\"] = format\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsGetCall) Fields(s ...googleapi.Field) *UsersDraftsGetCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsGetCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"format\"]; ok {\n\t\tparams.Set(\"format\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsGetCall) Do() (*Draft, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Draft\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the specified draft.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.drafts.get\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"format\": {\n\t\/\/ \"default\": \"full\",\n\t\/\/ \"description\": \"The format to return the draft in.\",\n\t\/\/ \"enum\": [\n\t\/\/ \"full\",\n\t\/\/ \"metadata\",\n\t\/\/ \"minimal\",\n\t\/\/ \"raw\"\n\t\/\/ ],\n\t\/\/ \"enumDescriptions\": [\n\t\/\/ \"\",\n\t\/\/ \"\",\n\t\/\/ \"\",\n\t\/\/ \"\"\n\t\/\/ ],\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the draft to retrieve.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
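// Illustrative sketch (not generated code): fetching a draft in raw format
// with the UsersDraftsGetCall above and decoding the RFC 2822 payload.
// Assumes encoding/base64 is imported and that Draft exposes a Message field;
// the draft ID is supplied by the caller.
func exampleGetRawDraft(svc *Service, draftID string) ([]byte, error) {
	draft, err := svc.Users.Drafts.Get("me", draftID).Format("raw").Do()
	if err != nil {
		return nil, err
	}
	// With format=raw the message body is returned base64url-encoded in
	// Message.Raw rather than as a parsed MessagePart tree. Padding may be
	// absent, so it is trimmed before decoding with the unpadded alphabet.
	return base64.RawURLEncoding.DecodeString(strings.TrimRight(draft.Message.Raw, "="))
}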
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\/{id}\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.list\":\n\ntype UsersDraftsListCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ List: Lists the drafts in the user's mailbox.\nfunc (r *UsersDraftsService) List(userId string) *UsersDraftsListCall {\n\tc := &UsersDraftsListCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ MaxResults sets the optional parameter \"maxResults\": Maximum number\n\/\/ of drafts to return.\nfunc (c *UsersDraftsListCall) MaxResults(maxResults int64) *UsersDraftsListCall {\n\tc.opt_[\"maxResults\"] = maxResults\n\treturn c\n}\n\n\/\/ PageToken sets the optional parameter \"pageToken\": Page token to\n\/\/ retrieve a specific page of results in the list.\nfunc (c *UsersDraftsListCall) PageToken(pageToken string) *UsersDraftsListCall {\n\tc.opt_[\"pageToken\"] = pageToken\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsListCall) Fields(s ...googleapi.Field) *UsersDraftsListCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsListCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"maxResults\"]; ok {\n\t\tparams.Set(\"maxResults\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"pageToken\"]; ok {\n\t\tparams.Set(\"pageToken\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsListCall) Do() (*ListDraftsResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *ListDraftsResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Lists the drafts in the user's mailbox.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.drafts.list\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"maxResults\": {\n\t\/\/ \"default\": \"100\",\n\t\/\/ \"description\": \"Maximum number of drafts to return.\",\n\t\/\/ \"format\": \"uint32\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"integer\"\n\t\/\/ },\n\t\/\/ \"pageToken\": {\n\t\/\/ \"description\": \"Page token to retrieve a specific page of results 
in the list.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"ListDraftsResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.send\":\n\ntype UsersDraftsSendCall struct {\n\ts *Service\n\tuserId string\n\tdraft *Draft\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Send: Sends the specified, existing draft to the recipients in the\n\/\/ To, Cc, and Bcc headers.\nfunc (r *UsersDraftsService) Send(userId string, draft *Draft) *UsersDraftsSendCall {\n\tc := &UsersDraftsSendCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.draft = draft\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersDraftsSendCall) Media(r io.Reader) *UsersDraftsSendCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersDraftsSendCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsSendCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersDraftsSendCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsSendCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsSendCall) Fields(s ...googleapi.Field) *UsersDraftsSendCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsSendCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.draft)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\/send\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", 
\"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar cancel func()\n\t\tcancel, _ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsSendCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Sends the specified, existing draft to the recipients in the To, Cc, and Bcc headers.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.drafts.send\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/drafts\/send\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/drafts\/send\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\/send\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.drafts.update\":\n\ntype UsersDraftsUpdateCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\tdraft *Draft\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Update: Replaces a draft's content.\nfunc (r *UsersDraftsService) Update(userId string, id string, draft *Draft) *UsersDraftsUpdateCall {\n\tc := &UsersDraftsUpdateCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\tc.draft = draft\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersDraftsUpdateCall) Media(r io.Reader) *UsersDraftsUpdateCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersDraftsUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsUpdateCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersDraftsUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsUpdateCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersDraftsUpdateCall) Fields(s ...googleapi.Field) *UsersDraftsUpdateCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersDraftsUpdateCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.draft)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/drafts\/{id}\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", \"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar 
cancel func()\n\t\tcancel, _ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"PUT\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersDraftsUpdateCall) Do() (*Draft, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Draft\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Replaces a draft's content.\",\n\t\/\/ \"httpMethod\": \"PUT\",\n\t\/\/ \"id\": \"gmail.users.drafts.update\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/drafts\/{id}\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/drafts\/{id}\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the draft to update.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/drafts\/{id}\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Draft\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.history.list\":\n\ntype UsersHistoryListCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ List: Lists the history of all changes to the given mailbox. History\n\/\/ results are returned in chronological order (increasing historyId).\nfunc (r *UsersHistoryService) List(userId string) *UsersHistoryListCall {\n\tc := &UsersHistoryListCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ LabelId sets the optional parameter \"labelId\": Only return messages\n\/\/ with a label matching the ID.\nfunc (c *UsersHistoryListCall) LabelId(labelId string) *UsersHistoryListCall {\n\tc.opt_[\"labelId\"] = labelId\n\treturn c\n}\n\n\/\/ MaxResults sets the optional parameter \"maxResults\": The maximum\n\/\/ number of history records to return.\nfunc (c *UsersHistoryListCall) MaxResults(maxResults int64) *UsersHistoryListCall {\n\tc.opt_[\"maxResults\"] = maxResults\n\treturn c\n}\n\n\/\/ PageToken sets the optional parameter \"pageToken\": Page token to\n\/\/ retrieve a specific page of results in the list.\nfunc (c *UsersHistoryListCall) PageToken(pageToken string) *UsersHistoryListCall {\n\tc.opt_[\"pageToken\"] = pageToken\n\treturn c\n}\n\n\/\/ StartHistoryId sets the optional parameter \"startHistoryId\":\n\/\/ Required. Returns history records after the specified startHistoryId.\n\/\/ The supplied startHistoryId should be obtained from the historyId of\n\/\/ a message, thread, or previous list response. History IDs increase\n\/\/ chronologically but are not contiguous with random gaps in between\n\/\/ valid IDs. Supplying an invalid or out of date startHistoryId\n\/\/ typically returns an HTTP 404 error code. A historyId is typically\n\/\/ valid for at least a week, but in some rare circumstances may be\n\/\/ valid for only a few hours. If you receive an HTTP 404 error\n\/\/ response, your application should perform a full sync. 
If you receive\n\/\/ no nextPageToken in the response, there are no updates to retrieve\n\/\/ and you can store the returned historyId for a future request.\nfunc (c *UsersHistoryListCall) StartHistoryId(startHistoryId uint64) *UsersHistoryListCall {\n\tc.opt_[\"startHistoryId\"] = startHistoryId\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersHistoryListCall) Fields(s ...googleapi.Field) *UsersHistoryListCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersHistoryListCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"labelId\"]; ok {\n\t\tparams.Set(\"labelId\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"maxResults\"]; ok {\n\t\tparams.Set(\"maxResults\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"pageToken\"]; ok {\n\t\tparams.Set(\"pageToken\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"startHistoryId\"]; ok {\n\t\tparams.Set(\"startHistoryId\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/history\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersHistoryListCall) Do() (*ListHistoryResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *ListHistoryResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Lists the history of all changes to the given mailbox. History results are returned in chronological order (increasing historyId).\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.history.list\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"labelId\": {\n\t\/\/ \"description\": \"Only return messages with a label matching the ID.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"maxResults\": {\n\t\/\/ \"default\": \"100\",\n\t\/\/ \"description\": \"The maximum number of history records to return.\",\n\t\/\/ \"format\": \"uint32\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"integer\"\n\t\/\/ },\n\t\/\/ \"pageToken\": {\n\t\/\/ \"description\": \"Page token to retrieve a specific page of results in the list.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"startHistoryId\": {\n\t\/\/ \"description\": \"Required. Returns history records after the specified startHistoryId. The supplied startHistoryId should be obtained from the historyId of a message, thread, or previous list response. History IDs increase chronologically but are not contiguous with random gaps in between valid IDs. Supplying an invalid or out of date startHistoryId typically returns an HTTP 404 error code. A historyId is typically valid for at least a week, but in some rare circumstances may be valid for only a few hours. 
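// Illustrative sketch (not generated code): an incremental sync loop over the
// UsersHistoryListCall above, paging with NextPageToken and carrying the
// returned HistoryId forward. A 404 from Do() would signal that
// startHistoryId is too old and a full sync is needed, as described in the
// startHistoryId documentation; that fallback is omitted here. Assumes
// svc.Users.History is wired up earlier in this file.
func exampleListHistory(svc *Service, startHistoryID uint64) (uint64, error) {
	latest := startHistoryID
	pageToken := ""
	for {
		call := svc.Users.History.List("me").StartHistoryId(startHistoryID)
		if pageToken != "" {
			call = call.PageToken(pageToken)
		}
		resp, err := call.Do()
		if err != nil {
			return latest, err
		}
		for _, h := range resp.History {
			for _, added := range h.MessagesAdded {
				fmt.Println("new message:", added.Message.Id)
			}
		}
		if resp.HistoryId > latest {
			latest = resp.HistoryId
		}
		if resp.NextPageToken == "" {
			return latest, nil
		}
		pageToken = resp.NextPageToken
	}
}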
If you receive an HTTP 404 error response, your application should perform a full sync. If you receive no nextPageToken in the response, there are no updates to retrieve and you can store the returned historyId for a future request.\",\n\t\/\/ \"format\": \"uint64\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/history\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"ListHistoryResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.create\":\n\ntype UsersLabelsCreateCall struct {\n\ts *Service\n\tuserId string\n\tlabel *Label\n\topt_ map[string]interface{}\n}\n\n\/\/ Create: Creates a new label.\nfunc (r *UsersLabelsService) Create(userId string, label *Label) *UsersLabelsCreateCall {\n\tc := &UsersLabelsCreateCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.label = label\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsCreateCall) Fields(s ...googleapi.Field) *UsersLabelsCreateCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsCreateCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.label)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsCreateCall) Do() (*Label, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Label\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Creates a new label.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.labels.create\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
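// Illustrative sketch (not generated code): creating a user label with the
// UsersLabelsCreateCall above. The visibility values are taken from the Label
// documentation earlier in this file; the label name is supplied by the
// caller. Assumes svc.Users.Labels is wired up earlier in this file.
func exampleCreateLabel(svc *Service, name string) (*Label, error) {
	label := &Label{
		Name:                  name,
		LabelListVisibility:   "labelShow",
		MessageListVisibility: "show",
	}
	return svc.Users.Labels.Create("me", label).Do()
}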
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.delete\":\n\ntype UsersLabelsDeleteCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Delete: Immediately and permanently deletes the specified label and\n\/\/ removes it from any messages and threads that it is applied to.\nfunc (r *UsersLabelsService) Delete(userId string, id string) *UsersLabelsDeleteCall {\n\tc := &UsersLabelsDeleteCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsDeleteCall) Fields(s ...googleapi.Field) *UsersLabelsDeleteCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsDeleteCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"DELETE\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsDeleteCall) Do() error {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n\t\/\/ {\n\t\/\/ \"description\": \"Immediately and permanently deletes the specified label and removes it from any messages and threads that it is applied to.\",\n\t\/\/ \"httpMethod\": \"DELETE\",\n\t\/\/ \"id\": \"gmail.users.labels.delete\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the label to delete.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\/{id}\",\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.get\":\n\ntype UsersLabelsGetCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Get: Gets the specified label.\nfunc (r *UsersLabelsService) Get(userId string, id string) *UsersLabelsGetCall {\n\tc := &UsersLabelsGetCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsGetCall) Fields(s ...googleapi.Field) *UsersLabelsGetCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsGetCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsGetCall) Do() (*Label, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Label\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the specified label.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.labels.get\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the label to retrieve.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\/{id}\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.list\":\n\ntype UsersLabelsListCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ List: Lists all labels in the user's mailbox.\nfunc (r *UsersLabelsService) List(userId string) *UsersLabelsListCall {\n\tc := &UsersLabelsListCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsListCall) Fields(s ...googleapi.Field) *UsersLabelsListCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsListCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsListCall) Do() (*ListLabelsResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *ListLabelsResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Lists all labels in the user's mailbox.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.labels.list\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"ListLabelsResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.patch\":\n\ntype UsersLabelsPatchCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\tlabel *Label\n\topt_ map[string]interface{}\n}\n\n\/\/ Patch: Updates the specified label. 
This method supports patch\n\/\/ semantics.\nfunc (r *UsersLabelsService) Patch(userId string, id string, label *Label) *UsersLabelsPatchCall {\n\tc := &UsersLabelsPatchCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\tc.label = label\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsPatchCall) Fields(s ...googleapi.Field) *UsersLabelsPatchCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsPatchCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.label)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"PATCH\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsPatchCall) Do() (*Label, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Label\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Updates the specified label. This method supports patch semantics.\",\n\t\/\/ \"httpMethod\": \"PATCH\",\n\t\/\/ \"id\": \"gmail.users.labels.patch\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the label to update.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\/{id}\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.labels.update\":\n\ntype UsersLabelsUpdateCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\tlabel *Label\n\topt_ map[string]interface{}\n}\n\n\/\/ Update: Updates the specified label.\nfunc (r *UsersLabelsService) Update(userId string, id string, label *Label) *UsersLabelsUpdateCall {\n\tc := &UsersLabelsUpdateCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\tc.label = label\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersLabelsUpdateCall) Fields(s ...googleapi.Field) *UsersLabelsUpdateCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersLabelsUpdateCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.label)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/labels\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"PUT\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersLabelsUpdateCall) Do() (*Label, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Label\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Updates the specified label.\",\n\t\/\/ \"httpMethod\": \"PUT\",\n\t\/\/ \"id\": \"gmail.users.labels.update\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the label to update.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/labels\/{id}\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Label\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.labels\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.delete\":\n\ntype UsersMessagesDeleteCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Delete: Immediately and permanently deletes the specified message.\n\/\/ This operation cannot be undone. Prefer messages.trash instead.\nfunc (r *UsersMessagesService) Delete(userId string, id string) *UsersMessagesDeleteCall {\n\tc := &UsersMessagesDeleteCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesDeleteCall) Fields(s ...googleapi.Field) *UsersMessagesDeleteCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesDeleteCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"DELETE\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesDeleteCall) Do() error {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n\t\/\/ {\n\t\/\/ \"description\": \"Immediately and permanently deletes the specified message. This operation cannot be undone. Prefer messages.trash instead.\",\n\t\/\/ \"httpMethod\": \"DELETE\",\n\t\/\/ \"id\": \"gmail.users.messages.delete\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the message to delete.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{id}\",\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.get\":\n\ntype UsersMessagesGetCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Get: Gets the specified message.\nfunc (r *UsersMessagesService) Get(userId string, id string) *UsersMessagesGetCall {\n\tc := &UsersMessagesGetCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Format sets the optional parameter \"format\": The format to return the\n\/\/ message in.\n\/\/\n\/\/ Possible values:\n\/\/ \"full\" (default)\n\/\/ \"metadata\"\n\/\/ \"minimal\"\n\/\/ \"raw\"\nfunc (c *UsersMessagesGetCall) Format(format string) *UsersMessagesGetCall {\n\tc.opt_[\"format\"] = format\n\treturn c\n}\n\n\/\/ MetadataHeaders sets the optional parameter \"metadataHeaders\": When\n\/\/ given and format is METADATA, only include headers specified.\nfunc (c *UsersMessagesGetCall) MetadataHeaders(metadataHeaders string) *UsersMessagesGetCall {\n\tc.opt_[\"metadataHeaders\"] = metadataHeaders\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesGetCall) Fields(s ...googleapi.Field) *UsersMessagesGetCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesGetCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"format\"]; ok {\n\t\tparams.Set(\"format\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"metadataHeaders\"]; ok {\n\t\tparams.Set(\"metadataHeaders\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesGetCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the specified message.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.messages.get\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"format\": {\n\t\/\/ \"default\": \"full\",\n\t\/\/ \"description\": \"The format to return the message in.\",\n\t\/\/ \"enum\": [\n\t\/\/ \"full\",\n\t\/\/ \"metadata\",\n\t\/\/ \"minimal\",\n\t\/\/ \"raw\"\n\t\/\/ ],\n\t\/\/ \"enumDescriptions\": [\n\t\/\/ \"\",\n\t\/\/ \"\",\n\t\/\/ \"\",\n\t\/\/ \"\"\n\t\/\/ ],\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ 
\"id\": {\n\t\/\/ \"description\": \"The ID of the message to retrieve.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"metadataHeaders\": {\n\t\/\/ \"description\": \"When given and format is METADATA, only include headers specified.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"repeated\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{id}\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.import\":\n\ntype UsersMessagesImportCall struct {\n\ts *Service\n\tuserId string\n\tmessage *Message\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Import: Imports a message into only this user's mailbox, with\n\/\/ standard email delivery scanning and classification similar to\n\/\/ receiving via SMTP. Does not send a message.\nfunc (r *UsersMessagesService) Import(userId string, message *Message) *UsersMessagesImportCall {\n\tc := &UsersMessagesImportCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.message = message\n\treturn c\n}\n\n\/\/ Deleted sets the optional parameter \"deleted\": Mark the email as\n\/\/ permanently deleted (not TRASH) and only visible in Google Apps Vault\n\/\/ to a Vault administrator. 
Only used for Google Apps for Work\n\/\/ accounts.\nfunc (c *UsersMessagesImportCall) Deleted(deleted bool) *UsersMessagesImportCall {\n\tc.opt_[\"deleted\"] = deleted\n\treturn c\n}\n\n\/\/ InternalDateSource sets the optional parameter \"internalDateSource\":\n\/\/ Source for Gmail's internal date of the message.\n\/\/\n\/\/ Possible values:\n\/\/ \"dateHeader\" (default)\n\/\/ \"receivedTime\"\nfunc (c *UsersMessagesImportCall) InternalDateSource(internalDateSource string) *UsersMessagesImportCall {\n\tc.opt_[\"internalDateSource\"] = internalDateSource\n\treturn c\n}\n\n\/\/ NeverMarkSpam sets the optional parameter \"neverMarkSpam\": Ignore the\n\/\/ Gmail spam classifier decision and never mark this email as SPAM in\n\/\/ the mailbox.\nfunc (c *UsersMessagesImportCall) NeverMarkSpam(neverMarkSpam bool) *UsersMessagesImportCall {\n\tc.opt_[\"neverMarkSpam\"] = neverMarkSpam\n\treturn c\n}\n\n\/\/ ProcessForCalendar sets the optional parameter \"processForCalendar\":\n\/\/ Process calendar invites in the email and add any extracted meetings\n\/\/ to the Google Calendar for this user.\nfunc (c *UsersMessagesImportCall) ProcessForCalendar(processForCalendar bool) *UsersMessagesImportCall {\n\tc.opt_[\"processForCalendar\"] = processForCalendar\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersMessagesImportCall) Media(r io.Reader) *UsersMessagesImportCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersMessagesImportCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesImportCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersMessagesImportCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesImportCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesImportCall) Fields(s ...googleapi.Field) *UsersMessagesImportCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesImportCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.message)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"deleted\"]; ok {\n\t\tparams.Set(\"deleted\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"internalDateSource\"]; ok {\n\t\tparams.Set(\"internalDateSource\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"neverMarkSpam\"]; ok {\n\t\tparams.Set(\"neverMarkSpam\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := 
c.opt_[\"processForCalendar\"]; ok {\n\t\tparams.Set(\"processForCalendar\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/import\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", \"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar cancel func()\n\t\tcancel, _ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesImportCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Imports a message into only this user's mailbox, with standard email delivery scanning and classification similar to receiving via SMTP. Does not send a message.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.import\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/messages\/import\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/messages\/import\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"deleted\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Mark the email as permanently deleted (not TRASH) and only visible in Google Apps Vault to a Vault administrator. 
Only used for Google Apps for Work accounts.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"internalDateSource\": {\n\t\/\/ \"default\": \"dateHeader\",\n\t\/\/ \"description\": \"Source for Gmail's internal date of the message.\",\n\t\/\/ \"enum\": [\n\t\/\/ \"dateHeader\",\n\t\/\/ \"receivedTime\"\n\t\/\/ ],\n\t\/\/ \"enumDescriptions\": [\n\t\/\/ \"\",\n\t\/\/ \"\"\n\t\/\/ ],\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"neverMarkSpam\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Ignore the Gmail spam classifier decision and never mark this email as SPAM in the mailbox.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"processForCalendar\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Process calendar invites in the email and add any extracted meetings to the Google Calendar for this user.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/import\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.insert\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.insert\":\n\ntype UsersMessagesInsertCall struct {\n\ts *Service\n\tuserId string\n\tmessage *Message\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Insert: Directly inserts a message into only this user's mailbox\n\/\/ similar to IMAP APPEND, bypassing most scanning and classification.\n\/\/ Does not send a message.\nfunc (r *UsersMessagesService) Insert(userId string, message *Message) *UsersMessagesInsertCall {\n\tc := &UsersMessagesInsertCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.message = message\n\treturn c\n}\n\n\/\/ Deleted sets the optional parameter \"deleted\": Mark the email as\n\/\/ permanently deleted (not TRASH) and only visible in Google Apps Vault\n\/\/ to a Vault administrator. 
Only used for Google Apps for Work\n\/\/ accounts.\nfunc (c *UsersMessagesInsertCall) Deleted(deleted bool) *UsersMessagesInsertCall {\n\tc.opt_[\"deleted\"] = deleted\n\treturn c\n}\n\n\/\/ InternalDateSource sets the optional parameter \"internalDateSource\":\n\/\/ Source for Gmail's internal date of the message.\n\/\/\n\/\/ Possible values:\n\/\/ \"dateHeader\"\n\/\/ \"receivedTime\" (default)\nfunc (c *UsersMessagesInsertCall) InternalDateSource(internalDateSource string) *UsersMessagesInsertCall {\n\tc.opt_[\"internalDateSource\"] = internalDateSource\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersMessagesInsertCall) Media(r io.Reader) *UsersMessagesInsertCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersMessagesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesInsertCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersMessagesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesInsertCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesInsertCall) Fields(s ...googleapi.Field) *UsersMessagesInsertCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesInsertCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.message)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"deleted\"]; ok {\n\t\tparams.Set(\"deleted\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"internalDateSource\"]; ok {\n\t\tparams.Set(\"internalDateSource\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", \"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar cancel func()\n\t\tcancel, _ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = 
googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesInsertCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Directly inserts a message into only this user's mailbox similar to IMAP APPEND, bypassing most scanning and classification. Does not send a message.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.insert\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/messages\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/messages\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"deleted\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Mark the email as permanently deleted (not TRASH) and only visible in Google Apps Vault to a Vault administrator. Only used for Google Apps for Work accounts.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"internalDateSource\": {\n\t\/\/ \"default\": \"receivedTime\",\n\t\/\/ \"description\": \"Source for Gmail's internal date of the message.\",\n\t\/\/ \"enum\": [\n\t\/\/ \"dateHeader\",\n\t\/\/ \"receivedTime\"\n\t\/\/ ],\n\t\/\/ \"enumDescriptions\": [\n\t\/\/ \"\",\n\t\/\/ \"\"\n\t\/\/ ],\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.insert\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.list\":\n\ntype UsersMessagesListCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ List: Lists the messages in the user's mailbox.\nfunc (r *UsersMessagesService) List(userId string) *UsersMessagesListCall {\n\tc := &UsersMessagesListCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ IncludeSpamTrash sets the optional parameter \"includeSpamTrash\":\n\/\/ Include messages from SPAM and TRASH in the results.\nfunc (c *UsersMessagesListCall) IncludeSpamTrash(includeSpamTrash bool) *UsersMessagesListCall {\n\tc.opt_[\"includeSpamTrash\"] = includeSpamTrash\n\treturn c\n}\n\n\/\/ LabelIds sets the optional parameter \"labelIds\": Only return messages\n\/\/ with labels that match all of the specified label IDs.\nfunc (c *UsersMessagesListCall) LabelIds(labelIds string) *UsersMessagesListCall {\n\tc.opt_[\"labelIds\"] = labelIds\n\treturn c\n}\n\n\/\/ MaxResults sets the optional parameter \"maxResults\": Maximum number\n\/\/ of messages to return.\nfunc (c *UsersMessagesListCall) MaxResults(maxResults int64) *UsersMessagesListCall {\n\tc.opt_[\"maxResults\"] = maxResults\n\treturn c\n}\n\n\/\/ PageToken sets the optional parameter \"pageToken\": Page token to\n\/\/ retrieve a specific page of results in the list.\nfunc (c *UsersMessagesListCall) PageToken(pageToken string) *UsersMessagesListCall {\n\tc.opt_[\"pageToken\"] = pageToken\n\treturn c\n}\n\n\/\/ Q sets the optional parameter \"q\": Only return messages matching the\n\/\/ specified query. Supports the same query format as the Gmail search\n\/\/ box. 
For example, \"from:someuser@example.com rfc822msgid: is:unread\".\nfunc (c *UsersMessagesListCall) Q(q string) *UsersMessagesListCall {\n\tc.opt_[\"q\"] = q\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesListCall) Fields(s ...googleapi.Field) *UsersMessagesListCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesListCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"includeSpamTrash\"]; ok {\n\t\tparams.Set(\"includeSpamTrash\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"labelIds\"]; ok {\n\t\tparams.Set(\"labelIds\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"maxResults\"]; ok {\n\t\tparams.Set(\"maxResults\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"pageToken\"]; ok {\n\t\tparams.Set(\"pageToken\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"q\"]; ok {\n\t\tparams.Set(\"q\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesListCall) Do() (*ListMessagesResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *ListMessagesResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Lists the messages in the user's mailbox.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.messages.list\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"includeSpamTrash\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Include messages from SPAM and TRASH in the results.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"labelIds\": {\n\t\/\/ \"description\": \"Only return messages with labels that match all of the specified label IDs.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"repeated\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"maxResults\": {\n\t\/\/ \"default\": \"100\",\n\t\/\/ \"description\": \"Maximum number of messages to return.\",\n\t\/\/ \"format\": \"uint32\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"integer\"\n\t\/\/ },\n\t\/\/ \"pageToken\": {\n\t\/\/ \"description\": \"Page token to retrieve a specific page of results in the list.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"q\": {\n\t\/\/ \"description\": \"Only return messages matching the specified query. Supports the same query format as the Gmail search box. For example, \\\"from:someuser@example.com rfc822msgid: is:unread\\\".\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"ListMessagesResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.modify\":\n\ntype UsersMessagesModifyCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\tmodifymessagerequest *ModifyMessageRequest\n\topt_ map[string]interface{}\n}\n\n\/\/ Modify: Modifies the labels on the specified message.\nfunc (r *UsersMessagesService) Modify(userId string, id string, modifymessagerequest *ModifyMessageRequest) *UsersMessagesModifyCall {\n\tc := &UsersMessagesModifyCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\tc.modifymessagerequest = modifymessagerequest\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesModifyCall) Fields(s ...googleapi.Field) *UsersMessagesModifyCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesModifyCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.modifymessagerequest)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{id}\/modify\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesModifyCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Modifies the labels on the specified message.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.modify\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the message to modify.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{id}\/modify\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"ModifyMessageRequest\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.send\":\n\ntype UsersMessagesSendCall struct {\n\ts *Service\n\tuserId string\n\tmessage *Message\n\topt_ map[string]interface{}\n\tmedia_ io.Reader\n\tresumable_ googleapi.SizeReaderAt\n\tmediaType_ string\n\tctx_ context.Context\n\tprotocol_ string\n}\n\n\/\/ Send: Sends the specified message to the recipients in the To, Cc,\n\/\/ and Bcc headers.\nfunc (r *UsersMessagesService) Send(userId string, message *Message) *UsersMessagesSendCall {\n\tc := &UsersMessagesSendCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.message = message\n\treturn c\n}\n\n\/\/ Media specifies the media to upload in a single chunk.\n\/\/ At most one of Media and ResumableMedia may be set.\nfunc (c *UsersMessagesSendCall) Media(r io.Reader) *UsersMessagesSendCall {\n\tc.media_ = r\n\tc.protocol_ = \"multipart\"\n\treturn c\n}\n\n\/\/ ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.\n\/\/ At most one of Media and ResumableMedia may be set.\n\/\/ mediaType identifies the MIME media type of the upload, such as \"image\/png\".\n\/\/ If mediaType is \"\", it will be auto-detected.\nfunc (c *UsersMessagesSendCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesSendCall {\n\tc.ctx_ = ctx\n\tc.resumable_ = io.NewSectionReader(r, 0, size)\n\tc.mediaType_ = mediaType\n\tc.protocol_ = \"resumable\"\n\treturn c\n}\n\n\/\/ ProgressUpdater provides a callback function that will be called after every chunk.\n\/\/ It should be a low-latency function in order to not slow down the upload operation.\n\/\/ This should only be called when using ResumableMedia (as opposed to Media).\nfunc (c *UsersMessagesSendCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesSendCall {\n\tc.opt_[\"progressUpdater\"] = pu\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesSendCall) Fields(s ...googleapi.Field) *UsersMessagesSendCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesSendCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.message)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/send\")\n\tif c.media_ != nil || c.resumable_ != nil {\n\t\turls = strings.Replace(urls, \"https:\/\/www.googleapis.com\/\", \"https:\/\/www.googleapis.com\/upload\/\", 1)\n\t\tparams.Set(\"uploadType\", c.protocol_)\n\t}\n\turls += \"?\" + params.Encode()\n\tif c.protocol_ != \"resumable\" {\n\t\tvar cancel func()\n\t\tcancel, _ = 
googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)\n\t\tif cancel != nil {\n\t\t\tdefer cancel()\n\t\t}\n\t}\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\tif c.protocol_ == \"resumable\" {\n\t\tif c.mediaType_ == \"\" {\n\t\t\tc.mediaType_ = googleapi.DetectMediaType(c.resumable_)\n\t\t}\n\t\treq.Header.Set(\"X-Upload-Content-Type\", c.mediaType_)\n\t\treq.Header.Set(\"Content-Type\", \"application\/json; charset=utf-8\")\n\t} else {\n\t\treq.Header.Set(\"Content-Type\", ctype)\n\t}\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesSendCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar progressUpdater_ googleapi.ProgressUpdater\n\tif v, ok := c.opt_[\"progressUpdater\"]; ok {\n\t\tif pu, ok := v.(googleapi.ProgressUpdater); ok {\n\t\t\tprogressUpdater_ = pu\n\t\t}\n\t}\n\tif c.protocol_ == \"resumable\" {\n\t\tloc := res.Header.Get(\"Location\")\n\t\trx := &googleapi.ResumableUpload{\n\t\t\tClient: c.s.client,\n\t\t\tUserAgent: c.s.userAgent(),\n\t\t\tURI: loc,\n\t\t\tMedia: c.resumable_,\n\t\t\tMediaType: c.mediaType_,\n\t\t\tContentLength: c.resumable_.Size(),\n\t\t\tCallback: progressUpdater_,\n\t\t}\n\t\tres, err = rx.Upload(c.ctx_)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer res.Body.Close()\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Sends the specified message to the recipients in the To, Cc, and Bcc headers.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.send\",\n\t\/\/ \"mediaUpload\": {\n\t\/\/ \"accept\": [\n\t\/\/ \"message\/rfc822\"\n\t\/\/ ],\n\t\/\/ \"maxSize\": \"35MB\",\n\t\/\/ \"protocols\": {\n\t\/\/ \"resumable\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/resumable\/upload\/gmail\/v1\/users\/{userId}\/messages\/send\"\n\t\/\/ },\n\t\/\/ \"simple\": {\n\t\/\/ \"multipart\": true,\n\t\/\/ \"path\": \"\/upload\/gmail\/v1\/users\/{userId}\/messages\/send\"\n\t\/\/ }\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/send\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.compose\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.send\"\n\t\/\/ ],\n\t\/\/ \"supportsMediaUpload\": true\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.trash\":\n\ntype UsersMessagesTrashCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Trash: Moves the specified message to the trash.\nfunc (r *UsersMessagesService) Trash(userId string, id string) *UsersMessagesTrashCall {\n\tc := &UsersMessagesTrashCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesTrashCall) Fields(s ...googleapi.Field) *UsersMessagesTrashCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesTrashCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{id}\/trash\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesTrashCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Moves the specified message to the trash.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.trash\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the message to Trash.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{id}\/trash\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.untrash\":\n\ntype UsersMessagesUntrashCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Untrash: Removes the specified message from the trash.\nfunc (r *UsersMessagesService) Untrash(userId string, id string) *UsersMessagesUntrashCall {\n\tc := &UsersMessagesUntrashCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesUntrashCall) Fields(s ...googleapi.Field) *UsersMessagesUntrashCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesUntrashCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{id}\/untrash\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesUntrashCall) Do() (*Message, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Message\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Removes the specified message from the trash.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.messages.untrash\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the message to remove from Trash.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{id}\/untrash\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Message\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.messages.attachments.get\":\n\ntype UsersMessagesAttachmentsGetCall struct {\n\ts *Service\n\tuserId string\n\tmessageId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Get: Gets the specified message attachment.\nfunc (r *UsersMessagesAttachmentsService) Get(userId string, messageId string, id string) *UsersMessagesAttachmentsGetCall {\n\tc := &UsersMessagesAttachmentsGetCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.messageId = messageId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersMessagesAttachmentsGetCall) Fields(s ...googleapi.Field) *UsersMessagesAttachmentsGetCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersMessagesAttachmentsGetCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/messages\/{messageId}\/attachments\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"messageId\": c.messageId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersMessagesAttachmentsGetCall) Do() (*MessagePartBody, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *MessagePartBody\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the specified message attachment.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.messages.attachments.get\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"messageId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the attachment.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"messageId\": {\n\t\/\/ \"description\": \"The ID of the message containing the attachment.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/messages\/{messageId}\/attachments\/{id}\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"MessagePartBody\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.delete\":\n\ntype UsersThreadsDeleteCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Delete: Immediately and permanently deletes the specified thread.\n\/\/ This operation cannot be undone. Prefer threads.trash instead.\nfunc (r *UsersThreadsService) Delete(userId string, id string) *UsersThreadsDeleteCall {\n\tc := &UsersThreadsDeleteCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsDeleteCall) Fields(s ...googleapi.Field) *UsersThreadsDeleteCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsDeleteCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"DELETE\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsDeleteCall) Do() error {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n\t\/\/ {\n\t\/\/ \"description\": \"Immediately and permanently deletes the specified thread. This operation cannot be undone. Prefer threads.trash instead.\",\n\t\/\/ \"httpMethod\": \"DELETE\",\n\t\/\/ \"id\": \"gmail.users.threads.delete\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"ID of the Thread to delete.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\/{id}\",\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.get\":\n\ntype UsersThreadsGetCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Get: Gets the specified thread.\nfunc (r *UsersThreadsService) Get(userId string, id string) *UsersThreadsGetCall {\n\tc := &UsersThreadsGetCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Format sets the optional parameter \"format\": The format to return the\n\/\/ messages in.\n\/\/\n\/\/ Possible values:\n\/\/ \"full\" (default)\n\/\/ \"metadata\"\n\/\/ \"minimal\"\nfunc (c *UsersThreadsGetCall) Format(format string) *UsersThreadsGetCall {\n\tc.opt_[\"format\"] = format\n\treturn c\n}\n\n\/\/ MetadataHeaders sets the optional parameter \"metadataHeaders\": When\n\/\/ given and format is METADATA, only include headers specified.\nfunc (c *UsersThreadsGetCall) MetadataHeaders(metadataHeaders string) *UsersThreadsGetCall {\n\tc.opt_[\"metadataHeaders\"] = metadataHeaders\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsGetCall) Fields(s ...googleapi.Field) *UsersThreadsGetCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsGetCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"format\"]; ok {\n\t\tparams.Set(\"format\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"metadataHeaders\"]; ok {\n\t\tparams.Set(\"metadataHeaders\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\/{id}\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsGetCall) Do() (*Thread, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Thread\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Gets the specified thread.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.threads.get\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"format\": {\n\t\/\/ \"default\": \"full\",\n\t\/\/ \"description\": \"The format to return the messages in.\",\n\t\/\/ \"enum\": [\n\t\/\/ \"full\",\n\t\/\/ \"metadata\",\n\t\/\/ \"minimal\"\n\t\/\/ ],\n\t\/\/ \"enumDescriptions\": [\n\t\/\/ \"\",\n\t\/\/ \"\",\n\t\/\/ \"\"\n\t\/\/ ],\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the thread to 
retrieve.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"metadataHeaders\": {\n\t\/\/ \"description\": \"When given and format is METADATA, only include headers specified.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"repeated\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\/{id}\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Thread\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.list\":\n\ntype UsersThreadsListCall struct {\n\ts *Service\n\tuserId string\n\topt_ map[string]interface{}\n}\n\n\/\/ List: Lists the threads in the user's mailbox.\nfunc (r *UsersThreadsService) List(userId string) *UsersThreadsListCall {\n\tc := &UsersThreadsListCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\treturn c\n}\n\n\/\/ IncludeSpamTrash sets the optional parameter \"includeSpamTrash\":\n\/\/ Include threads from SPAM and TRASH in the results.\nfunc (c *UsersThreadsListCall) IncludeSpamTrash(includeSpamTrash bool) *UsersThreadsListCall {\n\tc.opt_[\"includeSpamTrash\"] = includeSpamTrash\n\treturn c\n}\n\n\/\/ LabelIds sets the optional parameter \"labelIds\": Only return threads\n\/\/ with labels that match all of the specified label IDs.\nfunc (c *UsersThreadsListCall) LabelIds(labelIds string) *UsersThreadsListCall {\n\tc.opt_[\"labelIds\"] = labelIds\n\treturn c\n}\n\n\/\/ MaxResults sets the optional parameter \"maxResults\": Maximum number\n\/\/ of threads to return.\nfunc (c *UsersThreadsListCall) MaxResults(maxResults int64) *UsersThreadsListCall {\n\tc.opt_[\"maxResults\"] = maxResults\n\treturn c\n}\n\n\/\/ PageToken sets the optional parameter \"pageToken\": Page token to\n\/\/ retrieve a specific page of results in the list.\nfunc (c *UsersThreadsListCall) PageToken(pageToken string) *UsersThreadsListCall {\n\tc.opt_[\"pageToken\"] = pageToken\n\treturn c\n}\n\n\/\/ Q sets the optional parameter \"q\": Only return threads matching the\n\/\/ specified query. Supports the same query format as the Gmail search\n\/\/ box. 
For example, \"from:someuser@example.com rfc822msgid: is:unread\".\nfunc (c *UsersThreadsListCall) Q(q string) *UsersThreadsListCall {\n\tc.opt_[\"q\"] = q\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsListCall) Fields(s ...googleapi.Field) *UsersThreadsListCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsListCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"includeSpamTrash\"]; ok {\n\t\tparams.Set(\"includeSpamTrash\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"labelIds\"]; ok {\n\t\tparams.Set(\"labelIds\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"maxResults\"]; ok {\n\t\tparams.Set(\"maxResults\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"pageToken\"]; ok {\n\t\tparams.Set(\"pageToken\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"q\"]; ok {\n\t\tparams.Set(\"q\", fmt.Sprintf(\"%v\", v))\n\t}\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"GET\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsListCall) Do() (*ListThreadsResponse, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *ListThreadsResponse\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Lists the threads in the user's mailbox.\",\n\t\/\/ \"httpMethod\": \"GET\",\n\t\/\/ \"id\": \"gmail.users.threads.list\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"includeSpamTrash\": {\n\t\/\/ \"default\": \"false\",\n\t\/\/ \"description\": \"Include threads from SPAM and TRASH in the results.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"boolean\"\n\t\/\/ },\n\t\/\/ \"labelIds\": {\n\t\/\/ \"description\": \"Only return threads with labels that match all of the specified label IDs.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"repeated\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"maxResults\": {\n\t\/\/ \"default\": \"100\",\n\t\/\/ \"description\": \"Maximum number of threads to return.\",\n\t\/\/ \"format\": \"uint32\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"integer\"\n\t\/\/ },\n\t\/\/ \"pageToken\": {\n\t\/\/ \"description\": \"Page token to retrieve a specific page of results in the list.\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"q\": {\n\t\/\/ \"description\": \"Only return threads matching the specified query. Supports the same query format as the Gmail search box. For example, \\\"from:someuser@example.com rfc822msgid: is:unread\\\".\",\n\t\/\/ \"location\": \"query\",\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"ListThreadsResponse\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.readonly\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.modify\":\n\ntype UsersThreadsModifyCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\tmodifythreadrequest *ModifyThreadRequest\n\topt_ map[string]interface{}\n}\n\n\/\/ Modify: Modifies the labels applied to the thread. This applies to\n\/\/ all messages in the thread.\nfunc (r *UsersThreadsService) Modify(userId string, id string, modifythreadrequest *ModifyThreadRequest) *UsersThreadsModifyCall {\n\tc := &UsersThreadsModifyCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\tc.modifythreadrequest = modifythreadrequest\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsModifyCall) Fields(s ...googleapi.Field) *UsersThreadsModifyCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsModifyCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tbody, err := googleapi.WithoutDataWrapper.JSONReader(c.modifythreadrequest)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctype := \"application\/json\"\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\/{id}\/modify\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"Content-Type\", ctype)\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsModifyCall) Do() (*Thread, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Thread\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Modifies the labels applied to the thread. This applies to all messages in the thread.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.threads.modify\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the thread to modify.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\/{id}\/modify\",\n\t\/\/ \"request\": {\n\t\/\/ \"$ref\": \"ModifyThreadRequest\"\n\t\/\/ },\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Thread\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.trash\":\n\ntype UsersThreadsTrashCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Trash: Moves the specified thread to the trash.\nfunc (r *UsersThreadsService) Trash(userId string, id string) *UsersThreadsTrashCall {\n\tc := &UsersThreadsTrashCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsTrashCall) Fields(s ...googleapi.Field) *UsersThreadsTrashCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsTrashCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\/{id}\/trash\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsTrashCall) Do() (*Thread, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Thread\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Moves the specified thread to the trash.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.threads.trash\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the thread to Trash.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\/{id}\/trash\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Thread\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n\n\/\/ method id \"gmail.users.threads.untrash\":\n\ntype UsersThreadsUntrashCall struct {\n\ts *Service\n\tuserId string\n\tid string\n\topt_ map[string]interface{}\n}\n\n\/\/ Untrash: Removes the specified thread from the trash.\nfunc (r *UsersThreadsService) Untrash(userId string, id string) *UsersThreadsUntrashCall {\n\tc := &UsersThreadsUntrashCall{s: r.s, opt_: make(map[string]interface{})}\n\tc.userId = userId\n\tc.id = id\n\treturn c\n}\n\n\/\/ Fields allows partial responses to be retrieved.\n\/\/ See https:\/\/developers.google.com\/gdata\/docs\/2.0\/basics#PartialResponse\n\/\/ for more information.\nfunc (c *UsersThreadsUntrashCall) Fields(s ...googleapi.Field) *UsersThreadsUntrashCall {\n\tc.opt_[\"fields\"] = googleapi.CombineFields(s)\n\treturn c\n}\n\nfunc (c *UsersThreadsUntrashCall) doRequest(alt string) (*http.Response, error) {\n\tvar body io.Reader = nil\n\tparams := make(url.Values)\n\tparams.Set(\"alt\", alt)\n\tif v, ok := c.opt_[\"fields\"]; ok {\n\t\tparams.Set(\"fields\", fmt.Sprintf(\"%v\", v))\n\t}\n\turls := googleapi.ResolveRelative(c.s.BasePath, \"{userId}\/threads\/{id}\/untrash\")\n\turls += \"?\" + params.Encode()\n\treq, _ := http.NewRequest(\"POST\", urls, body)\n\tgoogleapi.Expand(req.URL, map[string]string{\n\t\t\"userId\": c.userId,\n\t\t\"id\": c.id,\n\t})\n\treq.Header.Set(\"User-Agent\", c.s.userAgent())\n\treturn c.s.client.Do(req)\n}\n\nfunc (c *UsersThreadsUntrashCall) Do() (*Thread, error) {\n\tres, err := c.doRequest(\"json\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer googleapi.CloseBody(res)\n\tif err := googleapi.CheckResponse(res); err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret *Thread\n\tif err := json.NewDecoder(res.Body).Decode(&ret); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ret, nil\n\t\/\/ {\n\t\/\/ \"description\": \"Removes the specified thread from the trash.\",\n\t\/\/ \"httpMethod\": \"POST\",\n\t\/\/ \"id\": \"gmail.users.threads.untrash\",\n\t\/\/ \"parameterOrder\": [\n\t\/\/ \"userId\",\n\t\/\/ \"id\"\n\t\/\/ ],\n\t\/\/ \"parameters\": {\n\t\/\/ \"id\": {\n\t\/\/ \"description\": \"The ID of the thread to remove from Trash.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ },\n\t\/\/ \"userId\": {\n\t\/\/ \"default\": \"me\",\n\t\/\/ \"description\": \"The user's email address. 
The special value me can be used to indicate the authenticated user.\",\n\t\/\/ \"location\": \"path\",\n\t\/\/ \"required\": true,\n\t\/\/ \"type\": \"string\"\n\t\/\/ }\n\t\/\/ },\n\t\/\/ \"path\": \"{userId}\/threads\/{id}\/untrash\",\n\t\/\/ \"response\": {\n\t\/\/ \"$ref\": \"Thread\"\n\t\/\/ },\n\t\/\/ \"scopes\": [\n\t\/\/ \"https:\/\/mail.google.com\/\",\n\t\/\/ \"https:\/\/www.googleapis.com\/auth\/gmail.modify\"\n\t\/\/ ]\n\t\/\/ }\n\n}\n","avg_line_length":30.0092307692,"max_line_length":755,"alphanum_fraction":0.643655432} +{"size":17177,"ext":"go","lang":"Go","max_stars_count":null,"content":"package tasks\n\nimport (\n\t\"crypto\/md5\"\n\t\"encoding\/json\"\n\t\"fmt\"\n\t\"github.com\/ansible-semaphore\/semaphore\/lib\"\n\t\"io\"\n\t\"io\/ioutil\"\n\t\"os\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\tlog \"github.com\/Sirupsen\/logrus\"\n\t\"github.com\/ansible-semaphore\/semaphore\/api\/sockets\"\n\t\"github.com\/ansible-semaphore\/semaphore\/db\"\n\t\"github.com\/ansible-semaphore\/semaphore\/util\"\n)\n\nconst (\n\tgitURLFilePrefix = \"file:\/\/\"\n)\n\ntype TaskRunner struct {\n\ttask db.Task\n\ttemplate db.Template\n\tinventory db.Inventory\n\trepository db.Repository\n\tenvironment db.Environment\n\n\tusers []int\n\thosts []string\n\talertChat *string\n\talert bool\n\tprepared bool\n\tprocess *os.Process\n\tpool *TaskPool\n}\n\n\/\/func (t *TaskRunner) validate() error {\n\/\/\tif t.task.ProjectID != t.template.ProjectID ||\n\/\/\t\tt.task.ProjectID != t.inventory.ProjectID ||\n\/\/\t\tt.task.ProjectID != t.repository.ProjectID ||\n\/\/\t\tt.task.ProjectID != t.environment.ProjectID {\n\/\/\t\treturn fmt.Errorf(\"invalid project id\")\n\/\/\t}\n\/\/\treturn nil\n\/\/}\n\nfunc getMD5Hash(filepath string) (string, error) {\n\tfile, err := os.Open(filepath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer file.Close()\n\n\thash := md5.New()\n\tif _, err := io.Copy(hash, file); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn fmt.Sprintf(\"%x\", hash.Sum(nil)), nil\n}\n\nfunc (t *TaskRunner) getRepoPath() string {\n\trepo := lib.GitRepository{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}\n\n\treturn repo.GetFullPath()\n}\n\nfunc (t *TaskRunner) setStatus(status string) {\n\tif t.task.Status == db.TaskStoppingStatus {\n\t\tswitch status {\n\t\tcase db.TaskFailStatus:\n\t\t\tstatus = db.TaskStoppedStatus\n\t\tcase db.TaskStoppedStatus:\n\t\tdefault:\n\t\t\tpanic(\"stopping TaskRunner cannot be \" + status)\n\t\t}\n\t}\n\n\tt.task.Status = status\n\n\tt.updateStatus()\n\n\tif status == db.TaskFailStatus {\n\t\tt.sendMailAlert()\n\t}\n\n\tif status == db.TaskSuccessStatus || status == db.TaskFailStatus {\n\t\tt.sendTelegramAlert()\n\t}\n}\n\nfunc (t *TaskRunner) updateStatus() {\n\tfor _, user := range t.users {\n\t\tb, err := json.Marshal(&map[string]interface{}{\n\t\t\t\"type\": \"update\",\n\t\t\t\"start\": t.task.Start,\n\t\t\t\"end\": t.task.End,\n\t\t\t\"status\": t.task.Status,\n\t\t\t\"task_id\": t.task.ID,\n\t\t\t\"template_id\": t.task.TemplateID,\n\t\t\t\"project_id\": t.task.ProjectID,\n\t\t\t\"version\": t.task.Version,\n\t\t})\n\n\t\tutil.LogPanic(err)\n\n\t\tsockets.Message(user, b)\n\t}\n\n\tif err := t.pool.store.UpdateTask(t.task); err != nil {\n\t\tt.panicOnError(err, \"Failed to update TaskRunner status\")\n\t}\n}\n\nfunc (t *TaskRunner) fail() {\n\tt.setStatus(db.TaskFailStatus)\n}\n\nfunc (t *TaskRunner) destroyKeys() {\n\terr := t.repository.SSHKey.Destroy()\n\tif err != nil {\n\t\tt.Log(\"Can't destroy 
repository key, error: \" + err.Error())\n\t}\n\n\terr = t.inventory.SSHKey.Destroy()\n\tif err != nil {\n\t\tt.Log(\"Can't destroy inventory user key, error: \" + err.Error())\n\t}\n\n\terr = t.inventory.BecomeKey.Destroy()\n\tif err != nil {\n\t\tt.Log(\"Can't destroy inventory become user key, error: \" + err.Error())\n\t}\n\n\terr = t.template.VaultKey.Destroy()\n\tif err != nil {\n\t\tt.Log(\"Can't destroy inventory vault password file, error: \" + err.Error())\n\t}\n}\n\nfunc (t *TaskRunner) createTaskEvent() {\n\tobjType := db.EventTask\n\tdesc := \"Task ID \" + strconv.Itoa(t.task.ID) + \" (\" + t.template.Name + \")\" + \" finished - \" + strings.ToUpper(t.task.Status)\n\n\t_, err := t.pool.store.CreateEvent(db.Event{\n\t\tUserID: t.task.UserID,\n\t\tProjectID: &t.task.ProjectID,\n\t\tObjectType: &objType,\n\t\tObjectID: &t.task.ID,\n\t\tDescription: &desc,\n\t})\n\n\tif err != nil {\n\t\tt.panicOnError(err, \"Fatal error inserting an event\")\n\t}\n}\n\nfunc (t *TaskRunner) prepareRun() {\n\tt.prepared = false\n\n\tdefer func() {\n\t\tlog.Info(\"Stopped preparing TaskRunner \" + strconv.Itoa(t.task.ID))\n\t\tlog.Info(\"Release resource locker with TaskRunner \" + strconv.Itoa(t.task.ID))\n\t\tresourceLocker <- &resourceLock{lock: false, holder: t}\n\n\t\tt.createTaskEvent()\n\t}()\n\n\tt.Log(\"Preparing: \" + strconv.Itoa(t.task.ID))\n\n\terr := checkTmpDir(util.Config.TmpPath)\n\tif err != nil {\n\t\tt.Log(\"Creating tmp dir failed: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\terr = t.populateDetails()\n\tif err != nil {\n\t\tt.Log(\"Error: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tobjType := db.EventTask\n\tdesc := \"Task ID \" + strconv.Itoa(t.task.ID) + \" (\" + t.template.Name + \")\" + \" is preparing\"\n\t_, err = t.pool.store.CreateEvent(db.Event{\n\t\tUserID: t.task.UserID,\n\t\tProjectID: &t.task.ProjectID,\n\t\tObjectType: &objType,\n\t\tObjectID: &t.task.ID,\n\t\tDescription: &desc,\n\t})\n\n\tif err != nil {\n\t\tt.Log(\"Fatal error inserting an event\")\n\t\tpanic(err)\n\t}\n\n\tt.Log(\"Prepare TaskRunner with template: \" + t.template.Name + \"\\n\")\n\n\tt.updateStatus()\n\n\t\/\/if err := t.installKey(t.repository.SSHKey, db.AccessKeyUsagePrivateKey); err != nil {\n\tif err := t.repository.SSHKey.Install(db.AccessKeyUsagePrivateKey); err != nil {\n\t\tt.Log(\"Failed installing ssh key for repository access: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tif strings.HasPrefix(t.repository.GitURL, gitURLFilePrefix) {\n\t\trepositoryPath := strings.TrimPrefix(t.repository.GitURL, gitURLFilePrefix)\n\t\tif _, err := os.Stat(repositoryPath); err != nil {\n\t\t\tt.Log(\"Failed in finding static repository at \" + repositoryPath + \": \" + err.Error())\n\t\t\tt.fail()\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tif err := t.updateRepository(); err != nil {\n\t\t\tt.Log(\"Failed updating repository: \" + err.Error())\n\t\t\tt.fail()\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err := t.checkoutRepository(); err != nil {\n\t\tt.Log(\"Failed to checkout repository to required commit: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tif err := t.installInventory(); err != nil {\n\t\tt.Log(\"Failed to install inventory: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tif err := t.installRequirements(); err != nil {\n\t\tt.Log(\"Running galaxy failed: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tif err := t.installVaultKeyFile(); err != nil {\n\t\tt.Log(\"Failed to install vault password file: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tif 
err := t.listPlaybookHosts(); err != nil {\n\t\tt.Log(\"Listing playbook hosts failed: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tt.prepared = true\n}\n\nfunc (t *TaskRunner) run() {\n\tdefer func() {\n\t\tlog.Info(\"Stopped running TaskRunner \" + strconv.Itoa(t.task.ID))\n\t\tlog.Info(\"Release resource locker with TaskRunner \" + strconv.Itoa(t.task.ID))\n\t\tresourceLocker <- &resourceLock{lock: false, holder: t}\n\n\t\tnow := time.Now()\n\t\tt.task.End = &now\n\t\tt.updateStatus()\n\t\tt.createTaskEvent()\n\t\tt.destroyKeys()\n\t}()\n\n\t\/\/ TODO: more details\n\tif t.task.Status == db.TaskStoppingStatus {\n\t\tt.setStatus(db.TaskStoppedStatus)\n\t\treturn\n\t}\n\n\tnow := time.Now()\n\tt.task.Start = &now\n\tt.setStatus(db.TaskRunningStatus)\n\n\tobjType := db.EventTask\n\tdesc := \"Task ID \" + strconv.Itoa(t.task.ID) + \" (\" + t.template.Name + \")\" + \" is running\"\n\n\t_, err := t.pool.store.CreateEvent(db.Event{\n\t\tUserID: t.task.UserID,\n\t\tProjectID: &t.task.ProjectID,\n\t\tObjectType: &objType,\n\t\tObjectID: &t.task.ID,\n\t\tDescription: &desc,\n\t})\n\n\tif err != nil {\n\t\tt.Log(\"Fatal error inserting an event\")\n\t\tpanic(err)\n\t}\n\n\tt.Log(\"Started: \" + strconv.Itoa(t.task.ID))\n\tt.Log(\"Run TaskRunner with template: \" + t.template.Name + \"\\n\")\n\n\t\/\/ TODO: ?????\n\tif t.task.Status == db.TaskStoppingStatus {\n\t\tt.setStatus(db.TaskStoppedStatus)\n\t\treturn\n\t}\n\n\terr = t.runPlaybook()\n\tif err != nil {\n\t\tt.Log(\"Running playbook failed: \" + err.Error())\n\t\tt.fail()\n\t\treturn\n\t}\n\n\tt.setStatus(db.TaskSuccessStatus)\n\n\ttemplates, err := t.pool.store.GetTemplates(t.task.ProjectID, db.TemplateFilter{\n\t\tBuildTemplateID: &t.task.TemplateID,\n\t\tAutorunOnly: true,\n\t}, db.RetrieveQueryParams{})\n\tif err != nil {\n\t\tt.Log(\"Running playbook failed: \" + err.Error())\n\t\treturn\n\t}\n\n\tfor _, tpl := range templates {\n\t\t_, err = t.pool.AddTask(db.Task{\n\t\t\tTemplateID: tpl.ID,\n\t\t\tProjectID: tpl.ProjectID,\n\t\t\tBuildTaskID: &t.task.ID,\n\t\t}, nil, tpl.ProjectID)\n\t\tif err != nil {\n\t\t\tt.Log(\"Running playbook failed: \" + err.Error())\n\t\t\tcontinue\n\t\t}\n\t}\n}\n\nfunc (t *TaskRunner) prepareError(err error, errMsg string) error {\n\tif err == db.ErrNotFound {\n\t\tt.Log(errMsg)\n\t\treturn err\n\t}\n\n\tif err != nil {\n\t\tt.fail()\n\t\tpanic(err)\n\t}\n\n\treturn nil\n}\n\n\/\/nolint: gocyclo\nfunc (t *TaskRunner) populateDetails() error {\n\t\/\/ get template\n\tvar err error\n\n\tt.template, err = t.pool.store.GetTemplate(t.task.ProjectID, t.task.TemplateID)\n\tif err != nil {\n\t\treturn t.prepareError(err, \"Template not found!\")\n\t}\n\n\t\/\/ get project alert setting\n\tproject, err := t.pool.store.GetProject(t.template.ProjectID)\n\tif err != nil {\n\t\treturn t.prepareError(err, \"Project not found!\")\n\t}\n\n\tt.alert = project.Alert\n\tt.alertChat = project.AlertChat\n\n\t\/\/ get project users\n\tusers, err := t.pool.store.GetProjectUsers(t.template.ProjectID, db.RetrieveQueryParams{})\n\tif err != nil {\n\t\treturn t.prepareError(err, \"Users not found!\")\n\t}\n\n\tt.users = []int{}\n\tfor _, user := range users {\n\t\tt.users = append(t.users, user.ID)\n\t}\n\n\t\/\/ get inventory\n\tt.inventory, err = t.pool.store.GetInventory(t.template.ProjectID, t.template.InventoryID)\n\tif err != nil {\n\t\treturn t.prepareError(err, \"Template Inventory not found!\")\n\t}\n\n\t\/\/ get repository\n\tt.repository, err = t.pool.store.GetRepository(t.template.ProjectID, 
t.template.RepositoryID)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t\/\/ get environment\n\tif t.template.EnvironmentID != nil {\n\t\tt.environment, err = t.pool.store.GetEnvironment(t.template.ProjectID, *t.template.EnvironmentID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif t.task.Environment != \"\" {\n\t\tenvironment := make(map[string]interface{})\n\t\tif t.environment.JSON != \"\" {\n\t\t\terr = json.Unmarshal([]byte(t.task.Environment), &environment)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\ttaskEnvironment := make(map[string]interface{})\n\t\terr = json.Unmarshal([]byte(t.environment.JSON), &taskEnvironment)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfor k, v := range taskEnvironment {\n\t\t\tenvironment[k] = v\n\t\t}\n\n\t\tvar ev []byte\n\t\tev, err = json.Marshal(environment)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tt.environment.JSON = string(ev)\n\t}\n\n\treturn nil\n}\n\nfunc (t *TaskRunner) installVaultKeyFile() error {\n\tif t.template.VaultKeyID == nil {\n\t\treturn nil\n\t}\n\n\treturn t.template.VaultKey.Install(db.AccessKeyUsageVault)\n}\n\nfunc (t *TaskRunner) checkoutRepository() error {\n\trepo := lib.GitRepository{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}\n\n\terr := repo.ValidateRepo()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif t.task.CommitHash != nil {\n\t\t\/\/ checkout to commit if it is provided for TaskRunner\n\t\treturn repo.Checkout(*t.task.CommitHash)\n\t}\n\n\t\/\/ store commit to TaskRunner table\n\n\tcommitHash, err := repo.GetLastCommitHash()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcommitMessage, _ := repo.GetLastCommitMessage()\n\n\tt.task.CommitHash = &commitHash\n\tt.task.CommitMessage = commitMessage\n\n\treturn t.pool.store.UpdateTask(t.task)\n}\n\nfunc (t *TaskRunner) updateRepository() error {\n\trepo := lib.GitRepository{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}\n\n\terr := repo.ValidateRepo()\n\n\tif err != nil {\n\t\tif !os.IsNotExist(err) {\n\t\t\terr = os.RemoveAll(repo.GetFullPath())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn repo.Clone()\n\t}\n\n\tif repo.CanBePulled() {\n\t\terr = repo.Pull()\n\t\tif err == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\terr = os.RemoveAll(repo.GetFullPath())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn repo.Clone()\n}\n\nfunc (t *TaskRunner) installRequirements() error {\n\trequirementsFilePath := fmt.Sprintf(\"%s\/roles\/requirements.yml\", t.getRepoPath())\n\trequirementsHashFilePath := fmt.Sprintf(\"%s\/requirements.md5\", t.getRepoPath())\n\n\tif _, err := os.Stat(requirementsFilePath); err != nil {\n\t\tt.Log(\"No roles\/requirements.yml file found. Skip galaxy install process.\\n\")\n\t\treturn nil\n\t}\n\n\tif hasRequirementsChanges(requirementsFilePath, requirementsHashFilePath) {\n\t\tif err := t.runGalaxy([]string{\n\t\t\t\"install\",\n\t\t\t\"-r\",\n\t\t\t\"roles\/requirements.yml\",\n\t\t\t\"--force\",\n\t\t}); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := writeMD5Hash(requirementsFilePath, requirementsHashFilePath); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tt.Log(\"roles\/requirements.yml has no changes. 
Skip galaxy install process.\\n\")\n\t}\n\n\treturn nil\n}\n\nfunc (t *TaskRunner) runGalaxy(args []string) error {\n\treturn lib.AnsiblePlaybook{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}.RunGalaxy(args)\n}\n\nfunc (t *TaskRunner) listPlaybookHosts() (err error) {\n\tif util.Config.ConcurrencyMode == \"project\" {\n\t\treturn\n\t}\n\n\targs, err := t.getPlaybookArgs()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tt.hosts, err = lib.AnsiblePlaybook{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}.GetHosts(args)\n\n\treturn\n}\n\nfunc (t *TaskRunner) runPlaybook() (err error) {\n\targs, err := t.getPlaybookArgs()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tcmd, err := lib.AnsiblePlaybook{\n\t\tLogger: t,\n\t\tTemplateID: t.template.ID,\n\t\tRepository: t.repository,\n\t}.MakeRunCmd(args)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tt.process = cmd.Process\n\n\treturn cmd.Wait()\n}\n\nfunc (t *TaskRunner) getExtraVars() (str string, err error) {\n\textraVars := make(map[string]interface{})\n\n\tif t.environment.JSON != \"\" {\n\t\terr = json.Unmarshal([]byte(t.environment.JSON), &extraVars)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tdelete(extraVars, \"ENV\")\n\n\ttaskDetails := make(map[string]interface{})\n\n\tif t.task.Message != \"\" {\n\t\ttaskDetails[\"message\"] = t.task.Message\n\t}\n\n\tif t.task.UserID != nil {\n\t\tvar user db.User\n\t\tuser, err = t.pool.store.GetUser(*t.task.UserID)\n\t\tif err == nil {\n\t\t\ttaskDetails[\"username\"] = user.Username\n\t\t}\n\t}\n\n\tif t.template.Type != db.TemplateTask {\n\t\ttaskDetails[\"type\"] = t.template.Type\n\t\tincomingVersion := t.task.GetIncomingVersion(t.pool.store)\n\t\tif incomingVersion != nil {\n\t\t\ttaskDetails[\"incoming_version\"] = incomingVersion\n\t\t}\n\t\tif t.template.Type == db.TemplateBuild {\n\t\t\ttaskDetails[\"target_version\"] = t.task.Version\n\t\t}\n\t}\n\n\tvars := make(map[string]interface{})\n\tvars[\"task_details\"] = taskDetails\n\textraVars[\"semaphore_vars\"] = vars\n\n\tev, err := json.Marshal(extraVars)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tstr = string(ev)\n\n\treturn\n}\n\n\/\/nolint: gocyclo\nfunc (t *TaskRunner) getPlaybookArgs() (args []string, err error) {\n\tplaybookName := t.task.Playbook\n\tif playbookName == \"\" {\n\t\tplaybookName = t.template.Playbook\n\t}\n\n\tvar inventory string\n\tswitch t.inventory.Type {\n\tcase db.InventoryFile:\n\t\tinventory = t.inventory.Inventory\n\tdefault:\n\t\tinventory = util.Config.TmpPath + \"\/inventory_\" + strconv.Itoa(t.task.ID)\n\t}\n\n\targs = []string{\n\t\t\"-i\", inventory,\n\t}\n\n\tif t.inventory.SSHKeyID != nil {\n\t\tswitch t.inventory.SSHKey.Type {\n\t\tcase db.AccessKeySSH:\n\t\t\targs = append(args, \"--private-key=\"+t.inventory.SSHKey.GetPath())\n\t\tcase db.AccessKeyLoginPassword:\n\t\t\targs = append(args, \"--extra-vars=@\"+t.inventory.SSHKey.GetPath())\n\t\tcase db.AccessKeyNone:\n\t\tdefault:\n\t\t\terr = fmt.Errorf(\"access key does not suite for inventory's User Access Key\")\n\t\t\treturn\n\t\t}\n\t}\n\n\tif t.inventory.BecomeKeyID != nil {\n\t\tswitch t.inventory.BecomeKey.Type {\n\t\tcase db.AccessKeyLoginPassword:\n\t\t\targs = append(args, \"--extra-vars=@\"+t.inventory.BecomeKey.GetPath())\n\t\tcase db.AccessKeyNone:\n\t\tdefault:\n\t\t\terr = fmt.Errorf(\"access key does not suite for inventory's Become User Access Key\")\n\t\t\treturn\n\t\t}\n\t}\n\n\tif t.task.Debug {\n\t\targs = append(args, \"-vvvv\")\n\t}\n\n\tif t.task.DryRun {\n\t\targs 
= append(args, \"--check\")\n\t}\n\n\tif t.template.VaultKeyID != nil {\n\t\targs = append(args, \"--vault-password-file\", t.template.VaultKey.GetPath())\n\t}\n\n\textraVars, err := t.getExtraVars()\n\tif err != nil {\n\t\tt.Log(err.Error())\n\t\tt.Log(\"Could not remove command environment, if existant it will be passed to --extra-vars. This is not fatal but be aware of side effects\")\n\t} else if extraVars != \"\" {\n\t\targs = append(args, \"--extra-vars\", extraVars)\n\t}\n\n\tvar templateExtraArgs []string\n\tif t.template.Arguments != nil {\n\t\terr = json.Unmarshal([]byte(*t.template.Arguments), &templateExtraArgs)\n\t\tif err != nil {\n\t\t\tt.Log(\"Invalid format of the template extra arguments, must be valid JSON\")\n\t\t\treturn\n\t\t}\n\t}\n\n\tvar taskExtraArgs []string\n\n\tif t.template.AllowOverrideArgsInTask && t.task.Arguments != nil {\n\t\terr = json.Unmarshal([]byte(*t.task.Arguments), &taskExtraArgs)\n\t\tif err != nil {\n\t\t\tt.Log(\"Invalid format of the TaskRunner extra arguments, must be valid JSON\")\n\t\t\treturn\n\t\t}\n\t}\n\n\targs = append(args, templateExtraArgs...)\n\targs = append(args, taskExtraArgs...)\n\targs = append(args, playbookName)\n\n\treturn\n}\n\nfunc hasRequirementsChanges(requirementsFilePath string, requirementsHashFilePath string) bool {\n\toldFileMD5HashBytes, err := ioutil.ReadFile(requirementsHashFilePath)\n\tif err != nil {\n\t\treturn true\n\t}\n\n\tnewFileMD5Hash, err := getMD5Hash(requirementsFilePath)\n\tif err != nil {\n\t\treturn true\n\t}\n\n\treturn string(oldFileMD5HashBytes) != newFileMD5Hash\n}\n\nfunc writeMD5Hash(requirementsFile string, requirementsHashFile string) error {\n\tnewFileMD5Hash, err := getMD5Hash(requirementsFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(requirementsHashFile, []byte(newFileMD5Hash), 0644)\n}\n\n\/\/ checkTmpDir checks to see if the temporary directory exists\n\/\/ and if it does not attempts to create it\nfunc checkTmpDir(path string) error {\n\tvar err error\n\tif _, err = os.Stat(path); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn os.MkdirAll(path, 0700)\n\t\t}\n\t}\n\treturn err\n}\n","avg_line_length":22.3950456323,"max_line_length":142,"alphanum_fraction":0.6659486523} +{"size":3875,"ext":"go","lang":"Go","max_stars_count":2282.0,"content":"\/\/ Copyright 2015, Yahoo Inc. 
All rights reserved.\n\/\/ Use of this source code is governed by a BSD-style\n\/\/ license that can be found in the LICENSE file.\n\npackage main\n\nimport (\n\t\"flag\"\n\t\"fmt\"\n\t\"io\"\n\t\"net\"\n\t\"net\/http\"\n\t\"os\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com\/yahoo\/gryffin\"\n\t\"github.com\/yahoo\/gryffin\/fuzzer\/arachni\"\n\t\"github.com\/yahoo\/gryffin\/fuzzer\/sqlmap\"\n\t\"github.com\/yahoo\/gryffin\/renderer\"\n)\n\nvar method = flag.String(\"method\", \"GET\", \"the HTTP method for the request.\")\nvar url string\nvar body = flag.String(\"data\", \"\", \"the data used in a (POST) request.\")\n\nfunc usage() {\n\tfmt.Fprintf(os.Stderr, \"Usage of %s:\\n\", os.Args[0])\n\tfmt.Fprintf(os.Stderr, \"\\tgryffin-standalone [flags] seed-url\\n\")\n\tfmt.Fprintf(os.Stderr, \"Flags:\\n\")\n\tflag.PrintDefaults()\n}\n\n\/\/ THIS IS BAD CODE per https:\/\/blog.golang.org\/pipelines, and is created for prototyping.\n\/\/ In production, we will move the channels out and use message queue instead.\nfunc linkChannels(s *gryffin.Scan) {\n\n\tvar wg sync.WaitGroup\n\n\tchanStart := make(chan *gryffin.Scan, 10)\n\tchanRateLimit := make(chan *gryffin.Scan, 10)\n\tchanCrawl := make(chan *gryffin.Scan, 10)\n\tchanFuzz := make(chan *gryffin.Scan, 10)\n\t\/\/ defer close(chanStart)\n\tdefer close(chanRateLimit)\n\tdefer close(chanCrawl)\n\tdefer close(chanFuzz)\n\n\t\/\/ TODO - name all of these functions.\n\n\t\/\/ Crawl -> Filter by Domain \/ Rate Limit\n\tgo func() {\n\n\t\tfor scan := range chanCrawl {\n\t\t\tr := &renderer.PhantomJSRenderer{Timeout: 10}\n\t\t\tscan.CrawlAsync(r)\n\n\t\t\tgo func() {\n\t\t\t\tif s := <-r.GetRequestBody(); s != nil {\n\t\t\t\t\t\/\/ add two workers (two fuzzers)\n\t\t\t\t\twg.Add(2)\n\t\t\t\t\tchanFuzz <- s\n\t\t\t\t}\n\n\t\t\t}()\n\n\t\t\tscan := scan \/\/ prevent capturing by goroutine below\n\t\t\tgo func() {\n\t\t\t\t\/\/\n\t\t\t\t\/\/ Renderer will close all channels when a page is duplicated.\n\t\t\t\t\/\/ Therefore we don't need to test whether the link is coming\n\t\t\t\t\/\/ from a duplicated page or not\n\t\t\t\tfor newScan := range r.GetLinks() {\n\t\t\t\t\tif ok := newScan.ShouldCrawl(); ok {\n\t\t\t\t\t\t\/\/ add one workers (a new crawl)\n\t\t\t\t\t\twg.Add(1)\n\t\t\t\t\t\tchanRateLimit <- newScan\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\/\/ remove one worker (finish crawl)\n\t\t\t\twg.Done()\n\t\t\t\tscan.Logm(\"Get Links\", \"Finished\")\n\n\t\t\t}()\n\n\t\t}\n\n\t}()\n\n\tgo func() {\n\t\tfor scan := range chanFuzz {\n\t\t\tscan := scan \/\/ prevent capture by func literal below\n\t\t\tgo func() {\n\t\t\t\tf := &arachni.Fuzzer{}\n\t\t\t\tf.Fuzz(scan)\n\t\t\t\t\/\/ remove a fuzzer worker.\n\t\t\t\twg.Done()\n\t\t\t}()\n\t\t\tgo func() {\n\t\t\t\tf := &sqlmap.Fuzzer{}\n\t\t\t\tf.Fuzz(scan)\n\t\t\t\t\/\/ remove a fuzzer worker.\n\t\t\t\twg.Done()\n\t\t\t}()\n\t\t}\n\n\t}()\n\n\t\/\/ Rate Limit -> Crawl\n\tgo func() {\n\t\tfor scan := range chanRateLimit {\n\t\t\tif delay := scan.RateLimit(); delay != 0 {\n\t\t\t\tgo func() {\n\t\t\t\t\ttime.Sleep(time.Duration(delay) * time.Second)\n\t\t\t\t\tchanRateLimit <- scan\n\t\t\t\t}()\n\t\t\t\t\/\/ TODO queue it again.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tchanCrawl <- scan\n\t\t}\n\t}()\n\n\t\/\/ Start, Poke -> RateLimit\n\tgo func() {\n\t\tfor scan := range chanStart {\n\t\t\t\/\/ TODO: add error handling\n\t\t\t\/\/ err := scan.Poke(&http.Client{})\n\t\t\t_ = scan.Poke(&http.Client{})\n\t\t\t\/\/ if err != nil {\n\t\t\t\/\/ if scan.HitCount <= 5 {\n\t\t\t\/\/ \tgo func() {\n\t\t\t\/\/ \t\ttime.Sleep(5 * 
time.Second)\n\t\t\t\/\/ \t\tchanStart <- scan\n\t\t\t\/\/ \t}()\n\t\t\t\/\/ }\n\t\t\t\/\/ continue\n\t\t\t\/\/ }\n\t\t\tchanRateLimit <- scan\n\t\t}\n\t}()\n\n\tchanStart <- s\n\tclose(chanStart)\n\n\t\/\/ add one worker (start crawl)\n\twg.Add(1)\n\twg.Wait()\n}\n\nfunc main() {\n\n\tflag.Usage = usage\n\tflag.Parse()\n\n\tswitch flag.NArg() {\n\tcase 1:\n\t\turl = flag.Arg(0)\n\tdefault:\n\t\tusage()\n\t\treturn\n\n\t}\n\n\tfmt.Println(\"=== Running Gryffin ===\")\n\n\tvar w io.Writer\n\t\/\/ TCP port listening messages.\n\ttcpout, err := net.Dial(\"tcp\", \"localhost:5000\")\n\tif err != nil {\n\t\t\/\/ fmt.Println(\"Cannot establish tcp connection to log listener.\")\n\t\tw = os.Stdout\n\t} else {\n\t\tw = io.MultiWriter(os.Stdout, tcpout)\n\t}\n\n\tgryffin.SetLogWriter(w)\n\n\tscan := gryffin.NewScan(*method, url, *body)\n\tscan.Logm(\"Main\", \"Started\")\n\n\tlinkChannels(scan)\n\n\tfmt.Println(\"=== End Running Gryffin ===\")\n\n}\n","avg_line_length":20.7219251337,"max_line_length":90,"alphanum_fraction":0.6170322581} +{"size":9258,"ext":"go","lang":"Go","max_stars_count":null,"content":"package collector_test\n\nimport (\n\t\"autoscaler\/cf\"\n\t. \"autoscaler\/metricscollector\/collector\"\n\t\"autoscaler\/metricscollector\/fakes\"\n\t\"autoscaler\/metricscollector\/noaa\"\n\t\"autoscaler\/models\"\n\n\t\"code.cloudfoundry.org\/clock\/fakeclock\"\n\t\"code.cloudfoundry.org\/lager\/lagertest\"\n\t\"github.com\/cloudfoundry\/sonde-go\/events\"\n\t. \"github.com\/onsi\/ginkgo\"\n\t. \"github.com\/onsi\/gomega\"\n\t\"github.com\/onsi\/gomega\/gbytes\"\n\n\t\"errors\"\n\t\"time\"\n)\n\nvar _ = Describe(\"AppStreamer\", func() {\n\n\tvar (\n\t\tcfc *fakes.FakeCFClient\n\t\tnoaaConsumer *fakes.FakeNoaaConsumer\n\t\tstreamer AppCollector\n\t\tbuffer *gbytes.Buffer\n\t\tmsgChan chan *events.Envelope\n\t\terrChan chan error\n\t\tfclock *fakeclock.FakeClock\n\t\tdataChan chan *models.AppInstanceMetric\n\t)\n\n\tBeforeEach(func() {\n\t\tcfc = &fakes.FakeCFClient{}\n\t\tnoaaConsumer = &fakes.FakeNoaaConsumer{}\n\n\t\tlogger := lagertest.NewTestLogger(\"AppStreamer-test\")\n\t\tbuffer = logger.Buffer()\n\t\tfclock = fakeclock.NewFakeClock(time.Now())\n\t\tdataChan = make(chan *models.AppInstanceMetric)\n\n\t\tstreamer = NewAppStreamer(logger, \"an-app-id\", TestCollectInterval, cfc, noaaConsumer, fclock, dataChan)\n\n\t\tmsgChan = make(chan *events.Envelope)\n\t\terrChan = make(chan error, 1)\n\t})\n\n\tDescribe(\"Start\", func() {\n\n\t\tJustBeforeEach(func() {\n\t\t\tstreamer.Start()\n\t\t})\n\n\t\tAfterEach(func() {\n\t\t\tstreamer.Stop()\n\t\t})\n\n\t\tBeforeEach(func() {\n\t\t\tcfc.GetTokensReturns(cf.Tokens{AccessToken: \"test-access-token\"})\n\t\t\tnoaaConsumer.StreamStub = func(appId string, authToken string) (outputChan <-chan *events.Envelope, errorChan <-chan error) {\n\t\t\t\tExpect(appId).To(Equal(\"an-app-id\"))\n\t\t\t\tExpect(authToken).To(Equal(\"Bearer test-access-token\"))\n\t\t\t\treturn msgChan, errChan\n\t\t\t}\n\t\t})\n\n\t\tContext(\"when there are containermetric events\", func() {\n\t\t\tBeforeEach(func() {\n\t\t\t\tgo func() {\n\t\t\t\t\tmsgChan <- noaa.NewContainerEnvelope(111111, \"an-app-id\", 0, 12.8, 100000000, 1000000000, 300000000, 2000000000)\n\t\t\t\t\tmsgChan <- noaa.NewContainerEnvelope(222222, \"an-app-id\", 1, 30.6, 200000000, 1000000000, 300000000, 2000000000)\n\t\t\t\t}()\n\t\t\t})\n\t\t\tIt(\"sends container metrics to channel\", func() {\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: 
\"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameMemoryUsed,\n\t\t\t\t\tUnit: models.UnitMegaBytes,\n\t\t\t\t\tValue: \"95\",\n\t\t\t\t\tTimestamp: 111111,\n\t\t\t\t}))\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameMemoryUtil,\n\t\t\t\t\tUnit: models.UnitPercentage,\n\t\t\t\t\tValue: \"33\",\n\t\t\t\t\tTimestamp: 111111,\n\t\t\t\t}))\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 1,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameMemoryUsed,\n\t\t\t\t\tUnit: models.UnitMegaBytes,\n\t\t\t\t\tValue: \"191\",\n\t\t\t\t\tTimestamp: 222222,\n\t\t\t\t}))\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 1,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameMemoryUtil,\n\t\t\t\t\tUnit: models.UnitPercentage,\n\t\t\t\t\tValue: \"67\",\n\t\t\t\t\tTimestamp: 222222,\n\t\t\t\t}))\n\n\t\t\t\tBy(\"collecting and computing throughput\")\n\t\t\t\tConsistently(dataChan).ShouldNot(Receive())\n\n\t\t\t\tBy(\"sending throughput after the collect interval\")\n\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameThroughput,\n\t\t\t\t\tUnit: models.UnitRPS,\n\t\t\t\t\tValue: \"0\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\t\t\t})\n\t\t})\n\n\t\tContext(\"when there are httpstartstop events\", func() {\n\t\t\tIt(\"sends responsetime and throughput metrics to channel\", func() {\n\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(111111, 100000000, 200000000, 0)\n\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(222222, 300000000, 600000000, 0)\n\n\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameThroughput,\n\t\t\t\t\tUnit: models.UnitRPS,\n\t\t\t\t\tValue: \"2\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameResponseTime,\n\t\t\t\t\tUnit: models.UnitMilliseconds,\n\t\t\t\t\tValue: \"200\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\n\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(333333, 100000000, 300000000, 1)\n\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(555555, 300000000, 600000000, 1)\n\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(666666, 300000000, 700000000, 1)\n\t\t\t\tConsistently(dataChan).ShouldNot(Receive())\n\n\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 1,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameThroughput,\n\t\t\t\t\tUnit: models.UnitRPS,\n\t\t\t\t\tValue: 
\"3\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 1,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameResponseTime,\n\t\t\t\t\tUnit: models.UnitMilliseconds,\n\t\t\t\t\tValue: \"300\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\t\t\t})\n\n\t\t\tContext(\"when the app has multiple instances\", func() {\n\t\t\t\tJustBeforeEach(func() {\n\t\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(111111, 100000000, 200000000, 0)\n\t\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(222222, 300000000, 500000000, 1)\n\t\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(333333, 200000000, 600000000, 2)\n\t\t\t\t\tmsgChan <- noaa.NewHttpStartStopEnvelope(555555, 300000000, 500000000, 2)\n\t\t\t\t})\n\t\t\t\tIt(\"sends throughput and responsetime metrics of multiple instances to channel\", func() {\n\t\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tEventually(dataChan).Should(Receive())\n\t\t\t\t\tConsistently(dataChan).ShouldNot(Receive())\n\t\t\t\t})\n\t\t\t})\n\t\t})\n\n\t\tContext(\"when there is no containermetrics or httpstartstop event\", func() {\n\t\t\tBeforeEach(func() {\n\t\t\t\tgo func() {\n\t\t\t\t\teventType := events.Envelope_CounterEvent\n\t\t\t\t\tmsgChan <- &events.Envelope{EventType: &eventType}\n\t\t\t\t}()\n\t\t\t})\n\t\t\tIt(\"Sends zero throughput metric to channel\", func() {\n\n\t\t\t\tBy(\"sending throughput after the collect interval\")\n\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\t\t\t\tExpect(<-dataChan).To(Equal(&models.AppInstanceMetric{\n\t\t\t\t\tAppId: \"an-app-id\",\n\t\t\t\t\tInstanceIndex: 0,\n\t\t\t\t\tCollectedAt: fclock.Now().UnixNano(),\n\t\t\t\t\tName: models.MetricNameThroughput,\n\t\t\t\t\tUnit: models.UnitRPS,\n\t\t\t\t\tValue: \"0\",\n\t\t\t\t\tTimestamp: fclock.Now().UnixNano(),\n\t\t\t\t}))\n\t\t\t})\n\t\t})\n\t\tContext(\"when there is error streaming events\", func() {\n\t\t\tBeforeEach(func() {\n\t\t\t\terrChan <- errors.New(\"an error\")\n\t\t\t})\n\t\t\tIt(\"logs the error and reconnect in next tick\", func() {\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"stream-metrics\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an-app-id\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an error\"))\n\n\t\t\t\tfclock.WaitForWatcherAndIncrement(TestCollectInterval)\n\t\t\t\tEventually(noaaConsumer.CloseCallCount).Should(Equal(1))\n\t\t\t\tEventually(noaaConsumer.StreamCallCount).Should(Equal(2))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"noaa-reconnected\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an-app-id\"))\n\t\t\t\tConsistently(buffer).ShouldNot(gbytes.Say(\"compute-and-save-metrics\"))\n\t\t\t})\n\t\t})\n\t})\n\n\tDescribe(\"Stop\", func() {\n\t\tBeforeEach(func() {\n\t\t\tstreamer.Start()\n\t\t})\n\t\tJustBeforeEach(func() {\n\t\t\tstreamer.Stop()\n\t\t})\n\t\tIt(\"stops the streaming\", func() {\n\t\t\tEventually(buffer).Should(gbytes.Say(\"app-streamer-stopped\"))\n\t\t\tEventually(buffer).Should(gbytes.Say(\"an-app-id\"))\n\t\t})\n\t\tContext(\"when error occurs closing the connection\", func() {\n\t\t\tBeforeEach(func() 
{\n\t\t\t\tnoaaConsumer.CloseReturns(errors.New(\"an error\"))\n\t\t\t})\n\t\t\tIt(\"logs the error\", func() {\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"close-noaa-connection\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an-app-id\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an error\"))\n\t\t\t})\n\t\t})\n\t\tContext(\"when closing the connection succeeds\", func() {\n\t\t\tIt(\"logs the message\", func() {\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"noaa-connection-closed\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"an-app-id\"))\n\t\t\t\tEventually(buffer).Should(gbytes.Say(\"app-streamer-stopped\"))\n\t\t\t})\n\t\t})\n\n\t})\n\n})\n","avg_line_length":33.5434782609,"max_line_length":128,"alphanum_fraction":0.652624757} +{"size":2302,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\n * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\")\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\npackage codec\n\nimport (\n\t\"github.com\/JorgenPo\/hazelcast-go-client\/internal\/proto\"\n\tiserialization \"github.com\/JorgenPo\/hazelcast-go-client\/internal\/serialization\"\n)\n\nconst (\n\t\/\/ hex: 0x030E00\n\tQueueCompareAndRetainAllCodecRequestMessageType = int32(200192)\n\t\/\/ hex: 0x030E01\n\tQueueCompareAndRetainAllCodecResponseMessageType = int32(200193)\n\n\tQueueCompareAndRetainAllCodecRequestInitialFrameSize = proto.PartitionIDOffset + proto.IntSizeInBytes\n\n\tQueueCompareAndRetainAllResponseResponseOffset = proto.ResponseBackupAcksOffset + proto.ByteSizeInBytes\n)\n\n\/\/ Retains only the elements in this collection that are contained in the specified collection (optional operation).\n\/\/ In other words, removes from this collection all of its elements that are not contained in the specified collection.\n\nfunc EncodeQueueCompareAndRetainAllRequest(name string, dataList []*iserialization.Data) *proto.ClientMessage {\n\tclientMessage := proto.NewClientMessageForEncode()\n\tclientMessage.SetRetryable(false)\n\n\tinitialFrame := proto.NewFrameWith(make([]byte, QueueCompareAndRetainAllCodecRequestInitialFrameSize), proto.UnfragmentedMessage)\n\tclientMessage.AddFrame(initialFrame)\n\tclientMessage.SetMessageType(QueueCompareAndRetainAllCodecRequestMessageType)\n\tclientMessage.SetPartitionId(-1)\n\n\tEncodeString(clientMessage, name)\n\tEncodeListMultiFrameForData(clientMessage, dataList)\n\n\treturn clientMessage\n}\n\nfunc DecodeQueueCompareAndRetainAllResponse(clientMessage *proto.ClientMessage) bool {\n\tframeIterator := clientMessage.FrameIterator()\n\tinitialFrame := frameIterator.Next()\n\n\treturn FixSizedTypesCodec.DecodeBoolean(initialFrame.Content, QueueCompareAndRetainAllResponseResponseOffset)\n}\n","avg_line_length":39.6896551724,"max_line_length":130,"alphanum_fraction":0.816246742} +{"size":4765,"ext":"go","lang":"Go","max_stars_count":5.0,"content":"\/\/ Copyright 2017 Google Inc. 
All Rights Reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage firestore\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"time\"\n\n\tpb \"google.golang.org\/genproto\/googleapis\/firestore\/v1beta1\"\n\n\t\"github.com\/golang\/protobuf\/ptypes\"\n)\n\n\/\/ A Precondition modifies a Firestore update or delete operation.\ntype Precondition interface {\n\t\/\/ Returns the corresponding Precondition proto.\n\tpreconditionProto() (*pb.Precondition, error)\n}\n\n\/\/ Exists returns a Precondition that checks for the existence or non-existence\n\/\/ of a resource before writing to it. If the check fails, the write does not occur.\nfunc Exists(b bool) Precondition { return exists(b) }\n\ntype exists bool\n\nfunc (e exists) preconditionProto() (*pb.Precondition, error) {\n\treturn &pb.Precondition{\n\t\tConditionType: &pb.Precondition_Exists{bool(e)},\n\t}, nil\n}\n\nfunc (e exists) String() string { return fmt.Sprintf(\"Exists(%t)\", e) }\n\n\/\/ LastUpdateTime returns a Precondition that checks that a resource must exist and\n\/\/ must have last been updated at the given time. If the check fails, the write\n\/\/ does not occur.\nfunc LastUpdateTime(t time.Time) Precondition { return lastUpdateTime(t) }\n\ntype lastUpdateTime time.Time\n\nfunc (u lastUpdateTime) preconditionProto() (*pb.Precondition, error) {\n\tts, err := ptypes.TimestampProto(time.Time(u))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pb.Precondition{\n\t\tConditionType: &pb.Precondition_UpdateTime{ts},\n\t}, nil\n}\n\nfunc (u lastUpdateTime) String() string { return fmt.Sprintf(\"LastUpdateTime(%s)\", time.Time(u)) }\n\nfunc processPreconditionsForDelete(preconds []Precondition) (*pb.Precondition, error) {\n\t\/\/ At most one option permitted.\n\tswitch len(preconds) {\n\tcase 0:\n\t\treturn nil, nil\n\tcase 1:\n\t\treturn preconds[0].preconditionProto()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"firestore: conflicting preconditions: %+v\", preconds)\n\t}\n}\n\nfunc processPreconditionsForUpdate(preconds []Precondition) (*pb.Precondition, error) {\n\t\/\/ At most one option permitted, and it cannot be Exists.\n\tswitch len(preconds) {\n\tcase 0:\n\t\t\/\/ If the user doesn't provide any options, default to Exists(true).\n\t\treturn exists(true).preconditionProto()\n\tcase 1:\n\t\tif _, ok := preconds[0].(exists); ok {\n\t\t\treturn nil, errors.New(\"Cannot use Exists with Update\")\n\t\t}\n\t\treturn preconds[0].preconditionProto()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"firestore: conflicting preconditions: %+v\", preconds)\n\t}\n}\n\nfunc processPreconditionsForVerify(preconds []Precondition) (*pb.Precondition, error) {\n\t\/\/ At most one option permitted.\n\tswitch len(preconds) {\n\tcase 0:\n\t\treturn nil, nil\n\tcase 1:\n\t\treturn preconds[0].preconditionProto()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"firestore: conflicting preconditions: %+v\", preconds)\n\t}\n}\n\n\/\/ A SetOption modifies a Firestore set operation.\ntype SetOption interface 
{\n\tfieldPaths() (fps []FieldPath, all bool, err error)\n}\n\n\/\/ MergeAll is a SetOption that causes all the field paths given in the data argument\n\/\/ to Set to be overwritten. It is not supported for struct data.\nvar MergeAll SetOption = merge{all: true}\n\n\/\/ Merge returns a SetOption that causes only the given field paths to be\n\/\/ overwritten. Other fields on the existing document will be untouched. It is an\n\/\/ error if a provided field path does not refer to a value in the data passed to\n\/\/ Set.\nfunc Merge(fps ...FieldPath) SetOption {\n\tfor _, fp := range fps {\n\t\tif err := fp.validate(); err != nil {\n\t\t\treturn merge{err: err}\n\t\t}\n\t}\n\treturn merge{paths: fps}\n}\n\ntype merge struct {\n\tall bool\n\tpaths []FieldPath\n\terr error\n}\n\nfunc (m merge) String() string {\n\tif m.err != nil {\n\t\treturn fmt.Sprintf(\"\", m.err)\n\t}\n\tif m.all {\n\t\treturn \"MergeAll\"\n\t}\n\treturn fmt.Sprintf(\"Merge(%+v)\", m.paths)\n}\n\nfunc (m merge) fieldPaths() (fps []FieldPath, all bool, err error) {\n\tif m.err != nil {\n\t\treturn nil, false, m.err\n\t}\n\tif err := checkNoDupOrPrefix(m.paths); err != nil {\n\t\treturn nil, false, err\n\t}\n\tif m.all {\n\t\treturn nil, true, nil\n\t}\n\treturn m.paths, false, nil\n}\n\nfunc processSetOptions(opts []SetOption) (fps []FieldPath, all bool, err error) {\n\tswitch len(opts) {\n\tcase 0:\n\t\treturn nil, false, nil\n\tcase 1:\n\t\treturn opts[0].fieldPaths()\n\tdefault:\n\t\treturn nil, false, fmt.Errorf(\"conflicting options: %+v\", opts)\n\t}\n}\n","avg_line_length":28.5329341317,"max_line_length":98,"alphanum_fraction":0.7185729276} +{"size":416,"ext":"go","lang":"Go","max_stars_count":null,"content":"package photoprism\n\nimport \"strings\"\n\ntype ConvertJob struct {\n\timage *MediaFile\n\tconvert *Convert\n}\n\nfunc ConvertWorker(jobs <-chan ConvertJob) {\n\tfor job := range jobs {\n\t\tif _, err := job.convert.ToJpeg(job.image); err != nil {\n\t\t\tfileName := job.image.RelativeName(job.convert.conf.OriginalsPath())\n\t\t\tlog.Errorf(\"convert: could not create jpeg for %s (%s)\", fileName, strings.TrimSpace(err.Error()))\n\t\t}\n\t}\n}\n","avg_line_length":23.1111111111,"max_line_length":101,"alphanum_fraction":0.6971153846} +{"size":35722,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/\/ +build unit\n\n\/\/ Copyright 2014-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\"). You may\n\/\/ not use this file except in compliance with the License. A copy of the\n\/\/ License is located at\n\/\/\n\/\/\thttp:\/\/aws.amazon.com\/apache2.0\/\n\/\/\n\/\/ or in the \"license\" file accompanying this file. This file is distributed\n\/\/ on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n\/\/ express or implied. 
See the License for the specific language governing\n\/\/ permissions and limitations under the License.\n\npackage ecsclient\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"reflect\"\n\t\"strings\"\n\t\"testing\"\n\t\"time\"\n\n\t\"github.com\/golang\/mock\/gomock\"\n\t\"github.com\/stretchr\/testify\/assert\"\n\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/api\"\n\tapicontainer \"github.com\/aws\/amazon-ecs-agent\/agent\/api\/container\"\n\tapicontainerstatus \"github.com\/aws\/amazon-ecs-agent\/agent\/api\/container\/status\"\n\tapieni \"github.com\/aws\/amazon-ecs-agent\/agent\/api\/eni\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/api\/mocks\"\n\tapitaskstatus \"github.com\/aws\/amazon-ecs-agent\/agent\/api\/task\/status\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/async\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/async\/mocks\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/config\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/ec2\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/ec2\/mocks\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/ecs_client\/model\/ecs\"\n\t\"github.com\/aws\/aws-sdk-go\/aws\"\n\t\"github.com\/aws\/aws-sdk-go\/aws\/awserr\"\n\t\"github.com\/aws\/aws-sdk-go\/aws\/credentials\"\n)\n\nconst (\n\tconfiguredCluster = \"mycluster\"\n\tiid = \"instanceIdentityDocument\"\n\tiidSignature = \"signature\"\n\tregistrationToken = \"clientToken\"\n)\n\nvar (\n\tiidResponse = []byte(iid)\n\tiidSignatureResponse = []byte(iidSignature)\n\tcontainerInstanceTags = []*ecs.Tag{\n\t\t{\n\t\t\tKey: aws.String(\"my_key1\"),\n\t\t\tValue: aws.String(\"my_val1\"),\n\t\t},\n\t\t{\n\t\t\tKey: aws.String(\"my_key2\"),\n\t\t\tValue: aws.String(\"my_val2\"),\n\t\t},\n\t}\n\tcontainerInstanceTagsMap = map[string]string{\n\t\t\"my_key1\": \"my_val1\",\n\t\t\"my_key2\": \"my_val2\",\n\t}\n)\n\nfunc NewMockClient(ctrl *gomock.Controller,\n\tec2Metadata ec2.EC2MetadataClient,\n\tadditionalAttributes map[string]string) (api.ECSClient, *mock_api.MockECSSDK, *mock_api.MockECSSubmitStateSDK) {\n\n\treturn NewMockClientWithConfig(ctrl, ec2Metadata, additionalAttributes,\n\t\t&config.Config{\n\t\t\tCluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t\tInstanceAttributes: additionalAttributes,\n\t\t})\n}\n\nfunc NewMockClientWithConfig(ctrl *gomock.Controller,\n\tec2Metadata ec2.EC2MetadataClient,\n\tadditionalAttributes map[string]string,\n\tcfg *config.Config) (api.ECSClient, *mock_api.MockECSSDK, *mock_api.MockECSSubmitStateSDK) {\n\tclient := NewECSClient(credentials.AnonymousCredentials, cfg, ec2Metadata)\n\tmockSDK := mock_api.NewMockECSSDK(ctrl)\n\tmockSubmitStateSDK := mock_api.NewMockECSSubmitStateSDK(ctrl)\n\tclient.(*APIECSClient).SetSDK(mockSDK)\n\tclient.(*APIECSClient).SetSubmitStateChangeSDK(mockSubmitStateSDK)\n\treturn client, mockSDK, mockSubmitStateSDK\n}\n\ntype containerSubmitInputMatcher struct {\n\tecs.SubmitContainerStateChangeInput\n}\n\ntype taskSubmitInputMatcher struct {\n\tecs.SubmitTaskStateChangeInput\n}\n\nfunc strptr(s string) *string { return &s }\nfunc intptr(i int) *int { return &i }\nfunc int64ptr(i *int) *int64 {\n\tif i == nil {\n\t\treturn nil\n\t}\n\tj := int64(*i)\n\treturn &j\n}\nfunc equal(lhs, rhs interface{}) bool {\n\treturn reflect.DeepEqual(lhs, rhs)\n}\nfunc (lhs *containerSubmitInputMatcher) Matches(x interface{}) bool {\n\trhs := x.(*ecs.SubmitContainerStateChangeInput)\n\n\treturn (equal(lhs.Cluster, rhs.Cluster) &&\n\t\tequal(lhs.ContainerName, rhs.ContainerName) &&\n\t\tequal(lhs.ExitCode, rhs.ExitCode) 
&&\n\t\tequal(lhs.NetworkBindings, rhs.NetworkBindings) &&\n\t\tequal(lhs.Reason, rhs.Reason) &&\n\t\tequal(lhs.Status, rhs.Status) &&\n\t\tequal(lhs.Task, rhs.Task))\n}\n\nfunc (lhs *containerSubmitInputMatcher) String() string {\n\treturn fmt.Sprintf(\"%+v\", *lhs)\n}\n\nfunc (lhs *taskSubmitInputMatcher) Matches(x interface{}) bool {\n\trhs := x.(*ecs.SubmitTaskStateChangeInput)\n\n\tif !(equal(lhs.Cluster, rhs.Cluster) &&\n\t\tequal(lhs.Task, rhs.Task) &&\n\t\tequal(lhs.Status, rhs.Status) &&\n\t\tequal(lhs.Reason, rhs.Reason) &&\n\t\tequal(len(lhs.Attachments), len(rhs.Attachments))) {\n\t\treturn false\n\t}\n\n\tif len(lhs.Attachments) != 0 {\n\t\tfor i := range lhs.Attachments {\n\t\t\tif !(equal(lhs.Attachments[i].Status, rhs.Attachments[i].Status) &&\n\t\t\t\tequal(lhs.Attachments[i].AttachmentArn, rhs.Attachments[i].AttachmentArn)) {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\n\treturn true\n}\n\nfunc (lhs *taskSubmitInputMatcher) String() string {\n\treturn fmt.Sprintf(\"%+v\", *lhs)\n}\n\nfunc TestSubmitContainerStateChange(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\tmockSubmitStateClient.EXPECT().SubmitContainerStateChange(&containerSubmitInputMatcher{\n\t\tecs.SubmitContainerStateChangeInput{\n\t\t\tCluster: strptr(configuredCluster),\n\t\t\tTask: strptr(\"arn\"),\n\t\t\tContainerName: strptr(\"cont\"),\n\t\t\tStatus: strptr(\"RUNNING\"),\n\t\t\tNetworkBindings: []*ecs.NetworkBinding{\n\t\t\t\t{\n\t\t\t\t\tBindIP: strptr(\"1.2.3.4\"),\n\t\t\t\t\tContainerPort: int64ptr(intptr(1)),\n\t\t\t\t\tHostPort: int64ptr(intptr(2)),\n\t\t\t\t\tProtocol: strptr(\"tcp\"),\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tBindIP: strptr(\"2.2.3.4\"),\n\t\t\t\t\tContainerPort: int64ptr(intptr(3)),\n\t\t\t\t\tHostPort: int64ptr(intptr(4)),\n\t\t\t\t\tProtocol: strptr(\"udp\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t})\n\terr := client.SubmitContainerStateChange(api.ContainerStateChange{\n\t\tTaskArn: \"arn\",\n\t\tContainerName: \"cont\",\n\t\tStatus: apicontainerstatus.ContainerRunning,\n\t\tPortBindings: []apicontainer.PortBinding{\n\t\t\t{\n\t\t\t\tBindIP: \"1.2.3.4\",\n\t\t\t\tContainerPort: 1,\n\t\t\t\tHostPort: 2,\n\t\t\t},\n\t\t\t{\n\t\t\t\tBindIP: \"2.2.3.4\",\n\t\t\t\tContainerPort: 3,\n\t\t\t\tHostPort: 4,\n\t\t\t\tProtocol: apicontainer.TransportProtocolUDP,\n\t\t\t},\n\t\t},\n\t})\n\tif err != nil {\n\t\tt.Errorf(\"Unable to submit container state change: %v\", err)\n\t}\n}\n\nfunc TestSubmitContainerStateChangeFull(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\texitCode := 20\n\treason := \"I exited\"\n\n\tmockSubmitStateClient.EXPECT().SubmitContainerStateChange(&containerSubmitInputMatcher{\n\t\tecs.SubmitContainerStateChangeInput{\n\t\t\tCluster: strptr(configuredCluster),\n\t\t\tTask: strptr(\"arn\"),\n\t\t\tContainerName: strptr(\"cont\"),\n\t\t\tStatus: strptr(\"STOPPED\"),\n\t\t\tExitCode: int64ptr(&exitCode),\n\t\t\tReason: strptr(reason),\n\t\t\tNetworkBindings: []*ecs.NetworkBinding{\n\t\t\t\t{\n\t\t\t\t\tBindIP: strptr(\"\"),\n\t\t\t\t\tContainerPort: int64ptr(intptr(0)),\n\t\t\t\t\tHostPort: int64ptr(intptr(0)),\n\t\t\t\t\tProtocol: strptr(\"tcp\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t})\n\terr := client.SubmitContainerStateChange(api.ContainerStateChange{\n\t\tTaskArn: \"arn\",\n\t\tContainerName: 
\"cont\",\n\t\tStatus: apicontainerstatus.ContainerStopped,\n\t\tExitCode: &exitCode,\n\t\tReason: reason,\n\t\tPortBindings: []apicontainer.PortBinding{\n\t\t\t{},\n\t\t},\n\t})\n\tif err != nil {\n\t\tt.Errorf(\"Unable to submit container state change: %v\", err)\n\t}\n}\n\nfunc TestSubmitContainerStateChangeReason(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\texitCode := 20\n\treason := strings.Repeat(\"a\", ecsMaxReasonLength)\n\n\tmockSubmitStateClient.EXPECT().SubmitContainerStateChange(&containerSubmitInputMatcher{\n\t\tecs.SubmitContainerStateChangeInput{\n\t\t\tCluster: strptr(configuredCluster),\n\t\t\tTask: strptr(\"arn\"),\n\t\t\tContainerName: strptr(\"cont\"),\n\t\t\tStatus: strptr(\"STOPPED\"),\n\t\t\tExitCode: int64ptr(&exitCode),\n\t\t\tReason: strptr(reason),\n\t\t\tNetworkBindings: []*ecs.NetworkBinding{},\n\t\t},\n\t})\n\terr := client.SubmitContainerStateChange(api.ContainerStateChange{\n\t\tTaskArn: \"arn\",\n\t\tContainerName: \"cont\",\n\t\tStatus: apicontainerstatus.ContainerStopped,\n\t\tExitCode: &exitCode,\n\t\tReason: reason,\n\t})\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}\n\nfunc TestSubmitContainerStateChangeLongReason(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\texitCode := 20\n\ttrimmedReason := strings.Repeat(\"a\", ecsMaxReasonLength)\n\treason := strings.Repeat(\"a\", ecsMaxReasonLength+1)\n\n\tmockSubmitStateClient.EXPECT().SubmitContainerStateChange(&containerSubmitInputMatcher{\n\t\tecs.SubmitContainerStateChangeInput{\n\t\t\tCluster: strptr(configuredCluster),\n\t\t\tTask: strptr(\"arn\"),\n\t\t\tContainerName: strptr(\"cont\"),\n\t\t\tStatus: strptr(\"STOPPED\"),\n\t\t\tExitCode: int64ptr(&exitCode),\n\t\t\tReason: strptr(trimmedReason),\n\t\t\tNetworkBindings: []*ecs.NetworkBinding{},\n\t\t},\n\t})\n\terr := client.SubmitContainerStateChange(api.ContainerStateChange{\n\t\tTaskArn: \"arn\",\n\t\tContainerName: \"cont\",\n\t\tStatus: apicontainerstatus.ContainerStopped,\n\t\tExitCode: &exitCode,\n\t\tReason: reason,\n\t})\n\tif err != nil {\n\t\tt.Errorf(\"Unable to submit container state change: %v\", err)\n\t}\n}\n\nfunc buildAttributeList(capabilities []string, attributes map[string]string) []*ecs.Attribute {\n\tvar rv []*ecs.Attribute\n\tfor _, capability := range capabilities {\n\t\trv = append(rv, &ecs.Attribute{Name: aws.String(capability)})\n\t}\n\tfor key, value := range attributes {\n\t\trv = append(rv, &ecs.Attribute{Name: aws.String(key), Value: aws.String(value)})\n\t}\n\treturn rv\n}\n\nfunc TestReRegisterContainerInstance(t *testing.T) {\n\tadditionalAttributes := map[string]string{\"my_custom_attribute\": \"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t\t\"attribute_name_with_no_value\": \"\",\n\t}\n\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\tclient, mc, _ := NewMockClient(mockCtrl, mockEC2Metadata, additionalAttributes)\n\n\tfakeCapabilities := []string{\"capability1\", \"capability2\"}\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t\t\"ecs.availability-zone\": \"us-west-2b\",\n\t}\n\tfor i := range fakeCapabilities {\n\t\texpectedAttributes[fakeCapabilities[i]] = \"\"\n\t}\n\tcapabilities := 
buildAttributeList(fakeCapabilities, nil)\n\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tassert.Equal(t, \"arn:test\", *req.ContainerInstanceArn, \"Wrong container instance ARN\")\n\t\t\tassert.Equal(t, configuredCluster, *req.Cluster, \"Wrong cluster\")\n\t\t\tassert.Equal(t, registrationToken, *req.ClientToken, \"Wrong client token\")\n\t\t\tassert.Equal(t, iid, *req.InstanceIdentityDocument, \"Wrong IID\")\n\t\t\tassert.Equal(t, iidSignature, *req.InstanceIdentityDocumentSignature, \"Wrong IID sig\")\n\t\t\tassert.Equal(t, 4, len(req.TotalResources), \"Wrong length of TotalResources\")\n\t\t\tresource, ok := findResource(req.TotalResources, \"PORTS_UDP\")\n\t\t\tassert.True(t, ok, `Could not find resource \"PORTS_UDP\"`)\n\t\t\tassert.Equal(t, \"STRINGSET\", *resource.Type, `Wrong type for resource \"PORTS_UDP\"`)\n\t\t\t\/\/ \"ecs.os-type\" and the 2 that we specified as additionalAttributes\n\t\t\tassert.Equal(t, 3, len(req.Attributes), \"Wrong number of Attributes\")\n\t\t\treqAttributes := func() map[string]string {\n\t\t\t\trv := make(map[string]string, len(req.Attributes))\n\t\t\t\tfor i := range req.Attributes {\n\t\t\t\t\trv[aws.StringValue(req.Attributes[i].Name)] = aws.StringValue(req.Attributes[i].Value)\n\t\t\t\t}\n\t\t\t\treturn rv\n\t\t\t}()\n\t\t\tfor k, v := range reqAttributes {\n\t\t\t\tassert.Contains(t, expectedAttributes, k)\n\t\t\t\tassert.Equal(t, expectedAttributes[k], v)\n\t\t\t}\n\t\t\tassert.Equal(t, len(containerInstanceTags), len(req.Tags), \"Wrong number of tags\")\n\t\t\treqTags := extractTagsMapFromRegisterContainerInstanceInput(req)\n\t\t\tfor k, v := range reqTags {\n\t\t\t\tassert.Contains(t, containerInstanceTagsMap, k)\n\t\t\t\tassert.Equal(t, containerInstanceTagsMap[k], v)\n\t\t\t}\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: buildAttributeList(fakeCapabilities, expectedAttributes),\n\t\t\t}},\n\t\t\tnil),\n\t)\n\n\tarn, availabilityzone, err := client.RegisterContainerInstance(\"arn:test\", capabilities, containerInstanceTags, registrationToken, nil)\n\n\tassert.NoError(t, err)\n\tassert.Equal(t, \"registerArn\", arn)\n\tassert.Equal(t, \"us-west-2b\", availabilityzone, \"availabilityZone is incorrect\")\n}\n\nfunc TestRegisterContainerInstance(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\tadditionalAttributes := map[string]string{\"my_custom_attribute\": \"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t}\n\tclient, mc, _ := NewMockClient(mockCtrl, mockEC2Metadata, additionalAttributes)\n\n\tfakeCapabilities := []string{\"capability1\", \"capability2\"}\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t\t\"my_custom_attribute\": \"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t\t\"ecs.availability-zone\": \"us-west-2b\",\n\t}\n\tcapabilities := buildAttributeList(fakeCapabilities, nil)\n\tplatformDevices := []*ecs.PlatformDevice{\n\t\t{\n\t\t\tId: aws.String(\"id1\"),\n\t\t\tType: 
aws.String(ecs.PlatformDeviceTypeGpu),\n\t\t},\n\t\t{\n\t\t\tId: aws.String(\"id2\"),\n\t\t\tType: aws.String(ecs.PlatformDeviceTypeGpu),\n\t\t},\n\t\t{\n\t\t\tId: aws.String(\"id3\"),\n\t\t\tType: aws.String(ecs.PlatformDeviceTypeGpu),\n\t\t},\n\t}\n\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tassert.Nil(t, req.ContainerInstanceArn)\n\t\t\tassert.Equal(t, configuredCluster, *req.Cluster, \"Wrong cluster\")\n\t\t\tassert.Equal(t, registrationToken, *req.ClientToken, \"Wrong client token\")\n\t\t\tassert.Equal(t, iid, *req.InstanceIdentityDocument, \"Wrong IID\")\n\t\t\tassert.Equal(t, iidSignature, *req.InstanceIdentityDocumentSignature, \"Wrong IID sig\")\n\t\t\tassert.Equal(t, 4, len(req.TotalResources), \"Wrong length of TotalResources\")\n\t\t\tresource, ok := findResource(req.TotalResources, \"PORTS_UDP\")\n\t\t\tassert.True(t, ok, `Could not find resource \"PORTS_UDP\"`)\n\t\t\tassert.Equal(t, \"STRINGSET\", *resource.Type, `Wrong type for resource \"PORTS_UDP\"`)\n\t\t\t\/\/ 3 from expectedAttributes and 2 from additionalAttributes\n\t\t\tassert.Equal(t, 5, len(req.Attributes), \"Wrong number of Attributes\")\n\t\t\tfor i := range req.Attributes {\n\t\t\t\tif strings.Contains(*req.Attributes[i].Name, \"capability\") {\n\t\t\t\t\tassert.Contains(t, fakeCapabilities, *req.Attributes[i].Name)\n\t\t\t\t} else {\n\t\t\t\t\tassert.Equal(t, expectedAttributes[*req.Attributes[i].Name], *req.Attributes[i].Value)\n\t\t\t\t}\n\t\t\t}\n\t\t\tassert.Equal(t, len(containerInstanceTags), len(req.Tags), \"Wrong number of tags\")\n\t\t\tassert.Equal(t, len(platformDevices), len(req.PlatformDevices), \"Wrong number of devices\")\n\t\t\treqTags := extractTagsMapFromRegisterContainerInstanceInput(req)\n\t\t\tfor k, v := range reqTags {\n\t\t\t\tassert.Contains(t, containerInstanceTagsMap, k)\n\t\t\t\tassert.Equal(t, containerInstanceTagsMap[k], v)\n\t\t\t}\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: buildAttributeList(fakeCapabilities, expectedAttributes)}},\n\t\t\tnil),\n\t)\n\n\tarn, availabilityzone, err := client.RegisterContainerInstance(\"\", capabilities, containerInstanceTags, registrationToken, platformDevices)\n\tassert.NoError(t, err)\n\tassert.Equal(t, \"registerArn\", arn)\n\tassert.Equal(t, \"us-west-2b\", availabilityzone)\n}\n\nfunc TestRegisterContainerInstanceNoIID(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\tadditionalAttributes := map[string]string{\"my_custom_attribute\": \"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t}\n\tclient, mc, _ := NewMockClientWithConfig(mockCtrl, mockEC2Metadata, additionalAttributes,\n\t\t&config.Config{\n\t\t\tCluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t\tInstanceAttributes: additionalAttributes,\n\t\t\tNoIID: true,\n\t\t})\n\n\tfakeCapabilities := []string{\"capability1\", \"capability2\"}\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t\t\"my_custom_attribute\": 
\"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t\t\"ecs.availability-zone\": \"us-west-2b\",\n\t}\n\tcapabilities := buildAttributeList(fakeCapabilities, nil)\n\n\tgomock.InOrder(\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tassert.Nil(t, req.ContainerInstanceArn)\n\t\t\tassert.Equal(t, configuredCluster, *req.Cluster, \"Wrong cluster\")\n\t\t\tassert.Equal(t, registrationToken, *req.ClientToken, \"Wrong client token\")\n\t\t\tassert.Equal(t, \"\", *req.InstanceIdentityDocument, \"Wrong IID\")\n\t\t\tassert.Equal(t, \"\", *req.InstanceIdentityDocumentSignature, \"Wrong IID sig\")\n\t\t\tassert.Equal(t, 4, len(req.TotalResources), \"Wrong length of TotalResources\")\n\t\t\tresource, ok := findResource(req.TotalResources, \"PORTS_UDP\")\n\t\t\tassert.True(t, ok, `Could not find resource \"PORTS_UDP\"`)\n\t\t\tassert.Equal(t, \"STRINGSET\", *resource.Type, `Wrong type for resource \"PORTS_UDP\"`)\n\t\t\t\/\/ 3 from expectedAttributes and 2 from additionalAttributes\n\t\t\tassert.Equal(t, 5, len(req.Attributes), \"Wrong number of Attributes\")\n\t\t\tfor i := range req.Attributes {\n\t\t\t\tif strings.Contains(*req.Attributes[i].Name, \"capability\") {\n\t\t\t\t\tassert.Contains(t, fakeCapabilities, *req.Attributes[i].Name)\n\t\t\t\t} else {\n\t\t\t\t\tassert.Equal(t, expectedAttributes[*req.Attributes[i].Name], *req.Attributes[i].Value)\n\t\t\t\t}\n\t\t\t}\n\t\t\tassert.Equal(t, len(containerInstanceTags), len(req.Tags), \"Wrong number of tags\")\n\t\t\treqTags := extractTagsMapFromRegisterContainerInstanceInput(req)\n\t\t\tfor k, v := range reqTags {\n\t\t\t\tassert.Contains(t, containerInstanceTagsMap, k)\n\t\t\t\tassert.Equal(t, containerInstanceTagsMap[k], v)\n\t\t\t}\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: buildAttributeList(fakeCapabilities, expectedAttributes)}},\n\t\t\tnil),\n\t)\n\n\tarn, availabilityzone, err := client.RegisterContainerInstance(\"\", capabilities, containerInstanceTags, registrationToken, nil)\n\tassert.NoError(t, err)\n\tassert.Equal(t, \"registerArn\", arn)\n\tassert.Equal(t, \"us-west-2b\", availabilityzone)\n}\n\n\/\/ TestRegisterContainerInstanceWithNegativeResource tests the registeration should fail with negative resource\nfunc TestRegisterContainerInstanceWithNegativeResource(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\t_, mem := getCpuAndMemory()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\tclient := NewECSClient(credentials.AnonymousCredentials,\n\t\t&config.Config{Cluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t\tReservedMemory: uint16(mem) + 1,\n\t\t}, mockEC2Metadata)\n\tmockSDK := mock_api.NewMockECSSDK(mockCtrl)\n\tmockSubmitStateSDK := mock_api.NewMockECSSubmitStateSDK(mockCtrl)\n\tclient.(*APIECSClient).SetSDK(mockSDK)\n\tclient.(*APIECSClient).SetSubmitStateChangeSDK(mockSubmitStateSDK)\n\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t)\n\t_, _, err := client.RegisterContainerInstance(\"\", nil, nil, \"\", nil)\n\tassert.Error(t, err, \"Register resource with negative value should cause registration 
fail\")\n}\n\nfunc TestRegisterContainerInstanceWithEmptyTags(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\tclient, mc, _ := NewMockClient(mockCtrl, mockEC2Metadata, nil)\n\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t\t\"my_custom_attribute\": \"Custom_Value1\",\n\t\t\"my_other_custom_attribute\": \"Custom_Value2\",\n\t}\n\n\tfakeCapabilities := []string{\"capability1\", \"capability2\"}\n\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tassert.Nil(t, req.Tags)\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: buildAttributeList(fakeCapabilities, expectedAttributes)}},\n\t\t\tnil),\n\t)\n\n\t_, _, err := client.RegisterContainerInstance(\"\", nil, make([]*ecs.Tag, 0), \"\", nil)\n\tassert.NoError(t, err)\n}\n\nfunc TestValidateRegisteredAttributes(t *testing.T) {\n\torigAttributes := []*ecs.Attribute{\n\t\t{Name: aws.String(\"foo\"), Value: aws.String(\"bar\")},\n\t\t{Name: aws.String(\"baz\"), Value: aws.String(\"quux\")},\n\t\t{Name: aws.String(\"no_value\"), Value: aws.String(\"\")},\n\t}\n\tactualAttributes := []*ecs.Attribute{\n\t\t{Name: aws.String(\"baz\"), Value: aws.String(\"quux\")},\n\t\t{Name: aws.String(\"foo\"), Value: aws.String(\"bar\")},\n\t\t{Name: aws.String(\"no_value\"), Value: aws.String(\"\")},\n\t\t{Name: aws.String(\"ecs.internal-attribute\"), Value: aws.String(\"some text\")},\n\t}\n\tassert.NoError(t, validateRegisteredAttributes(origAttributes, actualAttributes))\n\n\torigAttributes = append(origAttributes, &ecs.Attribute{Name: aws.String(\"abc\"), Value: aws.String(\"xyz\")})\n\tassert.Error(t, validateRegisteredAttributes(origAttributes, actualAttributes))\n}\n\nfunc findResource(resources []*ecs.Resource, name string) (*ecs.Resource, bool) {\n\tfor _, resource := range resources {\n\t\tif name == *resource.Name {\n\t\t\treturn resource, true\n\t\t}\n\t}\n\treturn nil, false\n}\n\nfunc TestRegisterBlankCluster(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\n\t\/\/ Test the special 'empty cluster' behavior of creating 'default'\n\tclient := NewECSClient(credentials.AnonymousCredentials,\n\t\t&config.Config{\n\t\t\tCluster: \"\",\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t},\n\t\tmockEC2Metadata)\n\tmc := mock_api.NewMockECSSDK(mockCtrl)\n\tclient.(*APIECSClient).SetSDK(mc)\n\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t}\n\tdefaultCluster := config.DefaultClusterName\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Return(nil, awserr.New(\"ClientException\", \"Cluster not found.\", errors.New(\"Cluster not 
found.\"))),\n\t\tmc.EXPECT().CreateCluster(&ecs.CreateClusterInput{ClusterName: &defaultCluster}).Return(&ecs.CreateClusterOutput{Cluster: &ecs.Cluster{ClusterName: &defaultCluster}}, nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tif *req.Cluster != config.DefaultClusterName {\n\t\t\t\tt.Errorf(\"Wrong cluster: %v\", *req.Cluster)\n\t\t\t}\n\t\t\tif *req.InstanceIdentityDocument != iid {\n\t\t\t\tt.Errorf(\"Wrong IID: %v\", *req.InstanceIdentityDocument)\n\t\t\t}\n\t\t\tif *req.InstanceIdentityDocumentSignature != iidSignature {\n\t\t\t\tt.Errorf(\"Wrong IID sig: %v\", *req.InstanceIdentityDocumentSignature)\n\t\t\t}\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: buildAttributeList(nil, expectedAttributes)}},\n\t\t\tnil),\n\t)\n\n\tarn, availabilityzone, err := client.RegisterContainerInstance(\"\", nil, nil, \"\", nil)\n\tif err != nil {\n\t\tt.Errorf(\"Should not be an error: %v\", err)\n\t}\n\tif arn != \"registerArn\" {\n\t\tt.Errorf(\"Wrong arn: %v\", arn)\n\t}\n\tif availabilityzone != \"\" {\n\t\tt.Errorf(\"wrong availability zone: %v\", availabilityzone)\n\t}\n}\n\nfunc TestRegisterBlankClusterNotCreatingClusterWhenErrorNotClusterNotFound(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tmockEC2Metadata := mock_ec2.NewMockEC2MetadataClient(mockCtrl)\n\n\t\/\/ Test the special 'empty cluster' behavior of creating 'default'\n\tclient := NewECSClient(credentials.AnonymousCredentials,\n\t\t&config.Config{\n\t\t\tCluster: \"\",\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t},\n\t\tmockEC2Metadata)\n\tmc := mock_api.NewMockECSSDK(mockCtrl)\n\tclient.(*APIECSClient).SetSDK(mc)\n\n\texpectedAttributes := map[string]string{\n\t\t\"ecs.os-type\": config.OSType,\n\t}\n\n\tgomock.InOrder(\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Return(nil, awserr.New(\"ClientException\", \"Invalid request.\", errors.New(\"Invalid request.\"))),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentResource).Return(\"instanceIdentityDocument\", nil),\n\t\tmockEC2Metadata.EXPECT().GetDynamicData(ec2.InstanceIdentityDocumentSignatureResource).Return(\"signature\", nil),\n\t\tmc.EXPECT().RegisterContainerInstance(gomock.Any()).Do(func(req *ecs.RegisterContainerInstanceInput) {\n\t\t\tif *req.Cluster != config.DefaultClusterName {\n\t\t\t\tt.Errorf(\"Wrong cluster: %v\", *req.Cluster)\n\t\t\t}\n\t\t\tif *req.InstanceIdentityDocument != iid {\n\t\t\t\tt.Errorf(\"Wrong IID: %v\", *req.InstanceIdentityDocument)\n\t\t\t}\n\t\t\tif *req.InstanceIdentityDocumentSignature != iidSignature {\n\t\t\t\tt.Errorf(\"Wrong IID sig: %v\", *req.InstanceIdentityDocumentSignature)\n\t\t\t}\n\t\t}).Return(&ecs.RegisterContainerInstanceOutput{\n\t\t\tContainerInstance: &ecs.ContainerInstance{\n\t\t\t\tContainerInstanceArn: aws.String(\"registerArn\"),\n\t\t\t\tAttributes: 
buildAttributeList(nil, expectedAttributes)}},\n\t\t\tnil),\n\t)\n\n\tarn, _, err := client.RegisterContainerInstance(\"\", nil, nil, \"\", nil)\n\tassert.NoError(t, err, \"Should not return error\")\n\tassert.Equal(t, \"registerArn\", arn, \"Wrong arn\")\n}\n\nfunc TestDiscoverTelemetryEndpoint(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, mc, _ := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\texpectedEndpoint := \"http:\/\/127.0.0.1\"\n\tmc.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(&ecs.DiscoverPollEndpointOutput{TelemetryEndpoint: &expectedEndpoint}, nil)\n\tendpoint, err := client.DiscoverTelemetryEndpoint(\"containerInstance\")\n\tif err != nil {\n\t\tt.Error(\"Error getting telemetry endpoint: \", err)\n\t}\n\tif expectedEndpoint != endpoint {\n\t\tt.Errorf(\"Expected telemetry endpoint(%s) != endpoint(%s)\", expectedEndpoint, endpoint)\n\t}\n}\n\nfunc TestDiscoverTelemetryEndpointError(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, mc, _ := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\tmc.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(nil, fmt.Errorf(\"Error getting endpoint\"))\n\t_, err := client.DiscoverTelemetryEndpoint(\"containerInstance\")\n\tif err == nil {\n\t\tt.Error(\"Expected error getting telemetry endpoint, didn't get any\")\n\t}\n}\n\nfunc TestDiscoverNilTelemetryEndpoint(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\tclient, mc, _ := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\tpollEndpoint := \"http:\/\/127.0.0.1\"\n\tmc.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(&ecs.DiscoverPollEndpointOutput{Endpoint: &pollEndpoint}, nil)\n\t_, err := client.DiscoverTelemetryEndpoint(\"containerInstance\")\n\tif err == nil {\n\t\tt.Error(\"Expected error getting telemetry endpoint with old response\")\n\t}\n}\n\nfunc TestDiscoverPollEndpointCacheHit(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\tmockSDK := mock_api.NewMockECSSDK(mockCtrl)\n\tpollEndpoinCache := mock_async.NewMockCache(mockCtrl)\n\tclient := &APIECSClient{\n\t\tcredentialProvider: credentials.AnonymousCredentials,\n\t\tconfig: &config.Config{\n\t\t\tCluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t},\n\t\tstandardClient: mockSDK,\n\t\tec2metadata: ec2.NewBlackholeEC2MetadataClient(),\n\t\tpollEndpoinCache: pollEndpoinCache,\n\t}\n\n\tpollEndpoint := \"http:\/\/127.0.0.1\"\n\tpollEndpoinCache.EXPECT().Get(\"containerInstance\").Return(\n\t\t&ecs.DiscoverPollEndpointOutput{\n\t\t\tEndpoint: aws.String(pollEndpoint),\n\t\t}, true)\n\toutput, err := client.discoverPollEndpoint(\"containerInstance\")\n\tif err != nil {\n\t\tt.Fatalf(\"Error in discoverPollEndpoint: %v\", err)\n\t}\n\tif aws.StringValue(output.Endpoint) != pollEndpoint {\n\t\tt.Errorf(\"Mismatch in poll endpoint: %s != %s\", aws.StringValue(output.Endpoint), pollEndpoint)\n\t}\n}\n\nfunc TestDiscoverPollEndpointCacheMiss(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\tmockSDK := mock_api.NewMockECSSDK(mockCtrl)\n\tpollEndpoinCache := mock_async.NewMockCache(mockCtrl)\n\tclient := &APIECSClient{\n\t\tcredentialProvider: credentials.AnonymousCredentials,\n\t\tconfig: &config.Config{\n\t\t\tCluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t},\n\t\tstandardClient: mockSDK,\n\t\tec2metadata: 
ec2.NewBlackholeEC2MetadataClient(),\n\t\tpollEndpoinCache: pollEndpoinCache,\n\t}\n\tpollEndpoint := \"http:\/\/127.0.0.1\"\n\tpollEndpointOutput := &ecs.DiscoverPollEndpointOutput{\n\t\tEndpoint: &pollEndpoint,\n\t}\n\n\tgomock.InOrder(\n\t\tpollEndpoinCache.EXPECT().Get(\"containerInstance\").Return(nil, false),\n\t\tmockSDK.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(pollEndpointOutput, nil),\n\t\tpollEndpoinCache.EXPECT().Set(\"containerInstance\", pollEndpointOutput),\n\t)\n\n\toutput, err := client.discoverPollEndpoint(\"containerInstance\")\n\tif err != nil {\n\t\tt.Fatalf(\"Error in discoverPollEndpoint: %v\", err)\n\t}\n\tif aws.StringValue(output.Endpoint) != pollEndpoint {\n\t\tt.Errorf(\"Mismatch in poll endpoint: %s != %s\", aws.StringValue(output.Endpoint), pollEndpoint)\n\t}\n}\n\nfunc TestDiscoverTelemetryEndpointAfterPollEndpointCacheHit(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\tmockSDK := mock_api.NewMockECSSDK(mockCtrl)\n\tpollEndpoinCache := async.NewLRUCache(1, 10*time.Minute)\n\tclient := &APIECSClient{\n\t\tcredentialProvider: credentials.AnonymousCredentials,\n\t\tconfig: &config.Config{\n\t\t\tCluster: configuredCluster,\n\t\t\tAWSRegion: \"us-east-1\",\n\t\t},\n\t\tstandardClient: mockSDK,\n\t\tec2metadata: ec2.NewBlackholeEC2MetadataClient(),\n\t\tpollEndpoinCache: pollEndpoinCache,\n\t}\n\n\tpollEndpoint := \"http:\/\/127.0.0.1\"\n\tmockSDK.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(\n\t\t&ecs.DiscoverPollEndpointOutput{\n\t\t\tEndpoint: &pollEndpoint,\n\t\t\tTelemetryEndpoint: &pollEndpoint,\n\t\t}, nil)\n\tendpoint, err := client.DiscoverPollEndpoint(\"containerInstance\")\n\tif err != nil {\n\t\tt.Fatalf(\"Error in discoverPollEndpoint: %v\", err)\n\t}\n\tif endpoint != pollEndpoint {\n\t\tt.Errorf(\"Mismatch in poll endpoint: %s\", endpoint)\n\t}\n\ttelemetryEndpoint, err := client.DiscoverTelemetryEndpoint(\"containerInstance\")\n\tif err != nil {\n\t\tt.Fatalf(\"Error in discoverTelemetryEndpoint: %v\", err)\n\t}\n\tif telemetryEndpoint != pollEndpoint {\n\t\tt.Errorf(\"Mismatch in poll endpoint: %s\", endpoint)\n\t}\n}\n\n\/\/ TestSubmitTaskStateChangeWithAttachments tests the SubmitTaskStateChange API\n\/\/ also send the Attachment Status\nfunc TestSubmitTaskStateChangeWithAttachments(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\tmockSubmitStateClient.EXPECT().SubmitTaskStateChange(&taskSubmitInputMatcher{\n\t\tecs.SubmitTaskStateChangeInput{\n\t\t\tCluster: aws.String(configuredCluster),\n\t\t\tTask: aws.String(\"task_arn\"),\n\t\t\tAttachments: []*ecs.AttachmentStateChange{\n\t\t\t\t{\n\t\t\t\t\tAttachmentArn: aws.String(\"eni_arn\"),\n\t\t\t\t\tStatus: aws.String(\"ATTACHED\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t})\n\n\terr := client.SubmitTaskStateChange(api.TaskStateChange{\n\t\tTaskARN: \"task_arn\",\n\t\tAttachment: &apieni.ENIAttachment{\n\t\t\tAttachmentARN: \"eni_arn\",\n\t\t\tStatus: apieni.ENIAttached,\n\t\t},\n\t})\n\tassert.NoError(t, err, \"Unable to submit task state change with attachments\")\n}\n\nfunc TestSubmitTaskStateChangeWithoutAttachments(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), 
nil)\n\tmockSubmitStateClient.EXPECT().SubmitTaskStateChange(&taskSubmitInputMatcher{\n\t\tecs.SubmitTaskStateChangeInput{\n\t\t\tCluster: aws.String(configuredCluster),\n\t\t\tTask: aws.String(\"task_arn\"),\n\t\t\tReason: aws.String(\"\"),\n\t\t\tStatus: aws.String(\"RUNNING\"),\n\t\t},\n\t})\n\n\terr := client.SubmitTaskStateChange(api.TaskStateChange{\n\t\tTaskARN: \"task_arn\",\n\t\tStatus: apitaskstatus.TaskRunning,\n\t})\n\tassert.NoError(t, err, \"Unable to submit task state change with no attachments\")\n}\n\n\/\/ TestSubmitContainerStateChangeWhileTaskInPending tests the container state change was submitted\n\/\/ when the task is still in pending state\nfunc TestSubmitContainerStateChangeWhileTaskInPending(t *testing.T) {\n\tmockCtrl := gomock.NewController(t)\n\tdefer mockCtrl.Finish()\n\n\ttestCases := []struct {\n\t\ttaskStatus apitaskstatus.TaskStatus\n\t}{\n\t\t{\n\t\t\tapitaskstatus.TaskStatusNone,\n\t\t},\n\t\t{\n\t\t\tapitaskstatus.TaskPulled,\n\t\t},\n\t\t{\n\t\t\tapitaskstatus.TaskCreated,\n\t\t},\n\t}\n\n\ttaskStateChangePending := api.TaskStateChange{\n\t\tStatus: apitaskstatus.TaskCreated,\n\t\tTaskARN: \"arn\",\n\t\tContainers: []api.ContainerStateChange{\n\t\t\t{\n\t\t\t\tTaskArn: \"arn\",\n\t\t\t\tContainerName: \"container\",\n\t\t\t\tStatus: apicontainerstatus.ContainerRunning,\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, tc := range testCases {\n\t\tt.Run(fmt.Sprintf(\"TaskStatus: %s\", tc.taskStatus.String()), func(t *testing.T) {\n\t\t\ttaskStateChangePending.Status = tc.taskStatus\n\t\t\tclient, _, mockSubmitStateClient := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil)\n\t\t\tmockSubmitStateClient.EXPECT().SubmitTaskStateChange(&taskSubmitInputMatcher{\n\t\t\t\tecs.SubmitTaskStateChangeInput{\n\t\t\t\t\tCluster: strptr(configuredCluster),\n\t\t\t\t\tTask: strptr(\"arn\"),\n\t\t\t\t\tStatus: strptr(\"PENDING\"),\n\t\t\t\t\tReason: strptr(\"\"),\n\t\t\t\t\tContainers: []*ecs.ContainerStateChange{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContainerName: strptr(\"container\"),\n\t\t\t\t\t\t\tStatus: strptr(\"RUNNING\"),\n\t\t\t\t\t\t\tNetworkBindings: []*ecs.NetworkBinding{},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\t\t\terr := client.SubmitTaskStateChange(taskStateChangePending)\n\t\t\tassert.NoError(t, err)\n\t\t})\n\t}\n}\n\nfunc extractTagsMapFromRegisterContainerInstanceInput(req *ecs.RegisterContainerInstanceInput) map[string]string {\n\ttagsMap := make(map[string]string, len(req.Tags))\n\tfor i := range req.Tags {\n\t\ttagsMap[aws.StringValue(req.Tags[i].Key)] = aws.StringValue(req.Tags[i].Value)\n\t}\n\treturn tagsMap\n}\n","avg_line_length":36.902892562,"max_line_length":174,"alphanum_fraction":0.7289065562} +{"size":1110,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com\/hfajunakaliantiga\/xendit-go\"\n\t\"github.com\/hfajunakaliantiga\/xendit-go\/customer\"\n)\n\nfunc customerTest() {\n\tcustomerAddress := xendit.CustomerAddress{\n\t\tCountry: \"ID\",\n\t\tStreetLine1: \"Jl. 123\",\n\t\tStreetLine2: \"Jl. 
456\",\n\t\tCity: \"Jakarta Selatan\",\n\t\tProvince: \"DKI Jakarta\",\n\t\tState: \"-\",\n\t\tPostalCode: \"12345\",\n\t}\n\n\tmetadata := map[string]interface{}{\n\t\t\"meta\": \"data\",\n\t}\n\n\tcreateCustomerData := customer.CreateCustomerParams{\n\t\tReferenceID: time.Now().String(),\n\t\tEmail: \"tes@tes.com\",\n\t\tGivenNames: \"Given Names\",\n\t\tNationality: \"ID\",\n\t\tDateOfBirth: \"1995-12-30\",\n\t\tAddresses: []xendit.CustomerAddress{customerAddress},\n\t\tMetadata: metadata,\n\t}\n\n\tresp, err := customer.CreateCustomer(&createCustomerData)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\tgetCustomerByReferenceIDData := customer.GetCustomerByReferenceIDParams{\n\t\tReferenceID: resp.ReferenceID,\n\t}\n\n\t_, err = customer.GetCustomerByReferenceID(&getCustomerByReferenceIDData)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\tfmt.Println(\"Customer integration tests done!\")\n}\n","avg_line_length":21.3461538462,"max_line_length":74,"alphanum_fraction":0.6864864865} +{"size":17485,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"encoding\/binary\"\n\t\"image\/color\"\n\t\"math\"\n\t\"math\/rand\"\n\t\"os\"\n\t\"strconv\"\n\n\t\"github.com\/chewxy\/math32\"\n\t\"github.com\/goki\/gi\/gi3d\"\n\t\"github.com\/goki\/gi\/gist\"\n\t\"github.com\/goki\/mat32\"\n\t\"github.com\/spaolacci\/murmur3\"\n)\n\ntype classDetails struct {\n\tclass string\n\tbrightColor color.RGBA\n\tmedColor color.RGBA\n\tdimColor color.RGBA\n\todds float32\n\tfudge float32\n\tminMass float32\n\tdeltaMass float32\n\tminRadii float32\n\tdeltaRadii float32\n\tminLum float32\n\tdeltaLum float32\n\tpixels int32\n}\n\ntype star struct {\n\tid int\n\tclass string\n\tbrightColor color.RGBA\n\tdimColor color.RGBA\n\tpixels int32\n\tmass float32\n\tradii float32\n\tluminance float32\n\t\/\/ 3D position\n\tx float32\n\ty float32\n\tz float32\n\t\/\/ sector location, 0 <= sx, sy, sz < 100\n\tsx float32\n\tsy float32\n\tsz float32\n\t\/\/ display location, 0 <= dx, dy, dz < 1000\n}\n\ntype sector struct {\n\tx uint32\n\ty uint32\n\tz uint32\n}\n\ntype position struct {\n\tx float32\n\ty float32\n\tz float32\n}\n\ntype jump struct {\n\tcolor gist.Color\n\tactiveColor gist.Color\n\tparsecs int\n\tdistance float32\n\ts1ID int\n\ts2ID int\n}\n\ntype simpleLine struct {\n\tfrom position\n\tto position\n\tjumpInfo *jump\n\tlines *gi3d.Lines\n}\n\nconst (\n\tintensityStep = 8\n\tfaster = true\n\tfastest = false\n)\n\nvar (\n\tsize = position{x: 2, y: 2, z: 1}\n\tsizeFloats = position{x: float32(size.x), y: float32(size.y), z: float32(size.z)}\n\toffsets = position{x: sizeFloats.x \/ -2.0, y: sizeFloats.y \/ -2.0, z: sizeFloats.z \/ -2.0}\n\n\tintensity = []uint8{\n\t\t0, 0, intensityStep, 2 * intensityStep, 3 * intensityStep, 4 * intensityStep, 5 * intensityStep,\n\t}\n\tjumpColors = []gist.Color{\n\t\tgist.Color(color.RGBA{R: math.MaxUint8 - eighth, G: 0, B: 0, A: math.MaxUint8 - intensity[0]}),\n\t\tgist.Color(color.RGBA{R: math.MaxUint8 - eighth, G: half + eighth - eighth, B: 0, A: math.MaxUint8 - intensity[1]}),\n\t\tgist.Color(color.RGBA{R: math.MaxUint8 - eighth, G: math.MaxUint8 - eighth, B: 0, A: math.MaxUint8 - intensity[2]}),\n\t\tgist.Color(color.RGBA{R: 0, G: math.MaxUint8 - eighth, B: 0, A: math.MaxUint8 - intensity[3]}),\n\t\tgist.Color(color.RGBA{R: 0, G: 0, B: math.MaxUint8 - eighth, A: math.MaxUint8 - intensity[4]}),\n\t\t\/\/gist.Color(color.RGBA{R: math.MaxUint8 - quarter, G: 0, B: math.MaxUint8 - quarter, A: math.MaxUint8 - intensity[5]}),\/\/\n\t}\n\n\ttween = 
uint8(sevenEighths)\n\tmed = uint8(threeQuarters)\n\tdim = uint8(half)\n\tnoJump = jump{color: gist.Color(color.RGBA{R: 0, G: 0, B: 0, A: 0}), activeColor: gist.Color(color.RGBA{R: 0, G: 0, B: 0, A: 0}),\n\t\tparsecs: 0, distance: 20480.0, s1ID: -1, s2ID: -1}\n\tnoLine = simpleLine{from: position{x: 0, y: 0, z: 0}, to: position{x: 0, y: 0, z: 0}, jumpInfo: &noJump, lines: &gi3d.Lines{}}\n\n\tjumpsByStar = make(map[int][]*jump)\n\n\tclassO = classDetails{\n\t\tclass: \"O\",\n\t\tbrightColor: color.RGBA{R: 0, G: 0, B: tween, A: opaque},\n\t\tmedColor: color.RGBA{R: 0, G: 0, B: tween, A: opaque},\n\t\tdimColor: color.RGBA{R: 0, G: 0, B: med, A: opaque},\n\t\todds: .0000003,\n\t\tfudge: .0000000402,\n\t\tminMass: 16.00001,\n\t\tdeltaMass: 243.2,\n\t\tminRadii: 6,\n\t\tdeltaRadii: 17.3,\n\t\tminLum: 30000,\n\t\tdeltaLum: 147000.2,\n\t\tpixels: 11,\n\t}\n\n\tclassB = classDetails{\n\t\tclass: \"B\",\n\t\tbrightColor: color.RGBA{R: dim, G: dim, B: tween, A: opaque},\n\t\tmedColor: color.RGBA{R: dim \/ two, G: dim \/ two, B: half, A: opaque},\n\t\tdimColor: color.RGBA{R: dim \/ (two * two), G: dim \/ (two * two), B: tween \/ two, A: opaque},\n\t\todds: .0013,\n\t\tfudge: .0003,\n\t\tminMass: 2.1,\n\t\tdeltaMass: 13.9,\n\t\tminRadii: 1.8,\n\t\tdeltaRadii: 4.8,\n\t\tminLum: 25,\n\t\tdeltaLum: 29975,\n\t\tpixels: 8,\n\t}\n\n\tclassA = classDetails{\n\t\tclass: \"A\",\n\t\tbrightColor: color.RGBA{R: tween, G: tween, B: tween, A: opaque},\n\t\tmedColor: color.RGBA{R: sevenEighths, G: sevenEighths, B: sevenEighths, A: opaque},\n\t\tdimColor: color.RGBA{R: half, G: half, B: half, A: opaque},\n\t\todds: .006,\n\t\tfudge: .0018,\n\t\tminMass: 1.4,\n\t\tdeltaMass: .7,\n\t\tminRadii: 1.4,\n\t\tdeltaRadii: .4,\n\t\tminLum: 5,\n\t\tdeltaLum: 20,\n\t\tpixels: 6,\n\t}\n\n\tclassF = classDetails{\n\t\tclass: \"F\",\n\t\tbrightColor: color.RGBA{R: tween, G: tween, B: sevenEighths, A: opaque},\n\t\tmedColor: color.RGBA{R: sevenEighths, G: sevenEighths, B: half, A: opaque},\n\t\tdimColor: color.RGBA{R: half, G: half, B: quarter \/ two, A: opaque},\n\t\todds: .03,\n\t\tfudge: .012,\n\t\tminMass: 1.04,\n\t\tdeltaMass: .36,\n\t\tminRadii: 1.15,\n\t\tdeltaRadii: .25,\n\t\tminLum: 1.5,\n\t\tdeltaLum: 3.5,\n\t\tpixels: 5,\n\t}\n\n\tclassG = classDetails{\n\t\tclass: \"G\",\n\t\tbrightColor: color.RGBA{R: tween, G: tween, B: 0, A: opaque},\n\t\tmedColor: color.RGBA{R: sevenEighths, G: sevenEighths, B: 0, A: opaque},\n\t\tdimColor: color.RGBA{R: half, G: half, B: 0, A: opaque},\n\t\todds: .076,\n\t\tfudge: .01102,\n\t\tminMass: .8,\n\t\tdeltaMass: .24,\n\t\tminRadii: .96,\n\t\tdeltaRadii: .19,\n\t\tminLum: .6,\n\t\tdeltaLum: .9,\n\t\tpixels: 4,\n\t}\n\n\tclassK = classDetails{\n\t\tclass: \"K\",\n\t\tbrightColor: color.RGBA{R: tween, G: tween - eighth, B: tween - quarter, A: opaque},\n\t\tmedColor: color.RGBA{R: threeQuarters, G: threeQuarters - eighth, B: half, A: opaque},\n\t\tdimColor: color.RGBA{R: half, G: half - eighth, B: quarter, A: opaque},\n\t\todds: .121,\n\t\tfudge: .042,\n\t\tminMass: .45,\n\t\tdeltaMass: .35,\n\t\tminRadii: .7,\n\t\tdeltaRadii: .26,\n\t\tminLum: .08,\n\t\tdeltaLum: .52,\n\t\tpixels: 3,\n\t}\n\n\tclassM = classDetails{\n\t\tclass: \"M\",\n\t\tbrightColor: color.RGBA{R: tween, G: 0, B: 0, A: opaque},\n\t\tmedColor: color.RGBA{R: sevenEighths, G: 0, B: 0, A: opaque},\n\t\tdimColor: color.RGBA{R: threeQuarters, G: 0, B: 0, A: opaque},\n\t\todds: .7645,\n\t\tfudge: .04,\n\t\tminMass: 1.04,\n\t\tdeltaMass: .36,\n\t\tminRadii: 1.15,\n\t\tdeltaRadii: .25,\n\t\tminLum: 1.5,\n\t\tdeltaLum: 
3.5,\n\t\tpixels: 2,\n\t}\n\n\tstarDetailsByClass = [7]classDetails{classO, classB, classA, classF, classG, classK, classM}\n\t\/\/ classByZoom = [11]int{7, 7, 7, 7, 7, 7, 6, 5, 4, 3, 2}\n)\n\nfunc getStarDetails(classDetails classDetails, sector sector, random1m *rand.Rand) []*star {\n\tstars := make([]*star, 0)\n\tloopSize := int32(800 * (classDetails.odds - classDetails.fudge + 2*classDetails.fudge*random1m.Float32()))\n\tfor i := 0; i < int(loopSize); i++ {\n\t\tnextStar := star{}\n\t\tnextStar.id = len(stars)\n\t\trandom1 := random1m.Float32()\n\t\tnextStar.sx = random1m.Float32()\n\t\tnextStar.sy = random1m.Float32()\n\t\tnextStar.sz = random1m.Float32()\n\t\tnextStar.x = float32(sector.x) + nextStar.sx\n\t\tnextStar.y = float32(sector.y) + nextStar.sy\n\t\tnextStar.z = float32(sector.z) + nextStar.sz\n\t\tnextStar.class = classDetails.class\n\t\tnextStar.brightColor = classDetails.brightColor\n\t\tnextStar.dimColor = classDetails.dimColor\n\t\tnextStar.mass = classDetails.minMass + classDetails.deltaMass*(1+random1)\n\t\tnextStar.radii = (classDetails.minRadii + random1*classDetails.deltaRadii) \/ 2\n\t\tnextStar.luminance = classDetails.minLum + random1*classDetails.deltaLum\n\t\tnextStar.pixels = classDetails.pixels\n\t\tstars = append(stars, &nextStar)\n\t}\n\n\treturn stars\n}\n\nfunc getSectorDetails(fromSector sector) (result []*star) {\n\tresult = make([]*star, 0)\n\trandom1m := getHash(fromSector)\n\tclassCount := 0\n\tfor _, starDetails := range starDetailsByClass {\n\t\tnextClass := getStarDetails(starDetails, fromSector, random1m)\n\t\tresult = append(result, nextClass...)\n\t\tclassCount++\n\t\t\/\/ if classCount > classByZoom[zoomIndex] {\n\t\t\/\/ \tbreak\n\t\t\/\/}\n\t}\n\n\treturn result\n}\n\nfunc getHash(aSector sector) *rand.Rand {\n\tid := murmur3.New64()\n\tbuf := make([]byte, 4)\n\tbinary.LittleEndian.PutUint32(buf, aSector.x)\n\t_, err := id.Write(buf)\n\tif err != nil {\n\t\tprint(\"Failed to hash part 1\")\n\t}\n\n\tbinary.LittleEndian.PutUint32(buf, aSector.y)\n\t_, err = id.Write(buf)\n\tif err != nil {\n\t\tprint(\"Failed to hash part two\")\n\t}\n\n\tbinary.LittleEndian.PutUint32(buf, aSector.z)\n\t_, err = id.Write(buf)\n\tif err != nil {\n\t\tprint(\"Failed to hash part 3\")\n\t}\n\n\treturn rand.New(rand.NewSource(int64(id.Sum64())))\n}\n\nfunc distance(s1 *star, s2 *star) float32 {\n\treturn math32.Sqrt((s1.x-s2.x)*(s1.x-s2.x) + (s1.y-s2.y)*(s1.y-s2.y) + (s1.z-s2.z)*(s1.z-s2.z))\n}\n\nvar (\n\tstars []*star\n\tlines []*simpleLine\n\n\tsName = \"sphere\"\n\tsphereModel *gi3d.Sphere\n\n\trendered = false\n\tconnectedStar int\n\thighWater int\n)\n\nfunc renderStars(sc *gi3d.Scene) {\n\tif !rendered {\n\t\tstars = make([]*star, 0)\n\t\tid := 0\n\t\tfor x := uint32(0); x < 2; x++ {\n\t\t\tfor y := uint32(0); y < 2; y++ {\n\t\t\t\tfor z := uint32(0); z < 2; z++ {\n\t\t\t\t\tsector := sector{x: x, y: y, z: z}\n\t\t\t\t\tfor _, star := range getSectorDetails(sector) {\n\t\t\t\t\t\tstar.id = id\n\t\t\t\t\t\tid++\n\t\t\t\t\t\tstars = append(stars, star)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif len(stars) > 0 {\n\t\t\tsphereModel = &gi3d.Sphere{}\n\t\t\tsphereModel.Reset()\n\t\t\tsphereModel = gi3d.AddNewSphere(sc, sName, 0.002, 24)\n\t\t\tlines = make([]*simpleLine, 0)\n\t\t\tsName = \"sphere\"\n\t\t\tfor _, star := range stars {\n\t\t\t\tstarSphere := gi3d.AddNewSolid(sc, sc, sName, sphereModel.Name())\n\t\t\t\tstarSphere.Pose.Pos.Set(star.x+offsets.x, star.y+offsets.y, star.z+offsets.z)\n\t\t\t\tstarSphere.Mat.Color.SetUInt8(star.brightColor.R, 
star.brightColor.G, star.brightColor.B, star.brightColor.A)\n\t\t\t}\n\t\t\tfor id, star := range stars {\n\t\t\t\tfor _, jump := range checkForJumps(stars, star, id) {\n\t\t\t\t\tlines = append(lines, jump)\n\t\t\t\t\tif jump.jumpInfo.distance < 3.0 {\n\t\t\t\t\t\tjumpsByStar[star.id] = append(jumpsByStar[star.id], jump.jumpInfo)\n\t\t\t\t\t\tif star.id == jump.jumpInfo.s2ID {\n\t\t\t\t\t\t\tjumpsByStar[jump.jumpInfo.s1ID] = append(jumpsByStar[jump.jumpInfo.s1ID], jump.jumpInfo)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tjumpsByStar[jump.jumpInfo.s2ID] = append(jumpsByStar[jump.jumpInfo.s2ID], jump.jumpInfo)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif !fastest {\n\t\t\t\trendered = true\n\t\t\t\thighWater = -1\n\t\t\t\tfor lNumber := 0; lNumber < len(stars); lNumber++ {\n\t\t\t\t\ttJumps := traceJumps(lNumber)\n\t\t\t\t\tif len(tJumps) > highWater {\n\t\t\t\t\t\thighWater = len(tJumps)\n\t\t\t\t\t\tconnectedStar = lNumber\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tf, err := os.Create(\"traveler-report.csv\")\n\n\t\t\t\tif err == nil {\n\t\t\t\t\talreadyPrinted := make([]int, 0)\n\t\t\t\t\t_, err := f.Write([]byte(csvTextHdr))\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tos.Exit(-1)\n\t\t\t\t\t}\n\t\t\t\t\tfor _, nextJump := range traceJumps(connectedStar) {\n\t\t\t\t\t\tif !contains(alreadyPrinted, nextJump.s1ID) && nextJump.s1ID > -1 {\n\t\t\t\t\t\t\t_, err := f.Write([]byte(worldFromStar(nextJump.s1ID).worldCSV))\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\tos.Exit(-1)\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\talreadyPrinted = append(alreadyPrinted, nextJump.s1ID)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif !contains(alreadyPrinted, nextJump.s2ID) && nextJump.s2ID > -1 {\n\t\t\t\t\t\t\t_, err := f.Write([]byte(worldFromStar(nextJump.s2ID).worldCSV))\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\tos.Exit(-1)\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\talreadyPrinted = append(alreadyPrinted, nextJump.s2ID)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif !faster {\n\t\t\t\t\tpopMax := 0\n\t\t\t\t\tbigWorld := worldFromStar(stars[0].id)\n\t\t\t\t\tbigStar := *stars[0]\n\t\t\t\t\ttechMax := 0\n\t\t\t\t\ttechWorld := worldFromStar(stars[0].id)\n\t\t\t\t\ttechStar := *stars[0]\n\n\t\t\t\t\tfor _, star := range stars {\n\t\t\t\t\t\tworld := worldFromStar(star.id)\n\t\t\t\t\t\tif world.techLevelBase > techMax {\n\t\t\t\t\t\t\ttechMax = world.techLevelBase\n\t\t\t\t\t\t\ttechWorld = world\n\t\t\t\t\t\t\ttechStar = *star\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif world.popBase > popMax {\n\t\t\t\t\t\t\tpopMax = world.popBase\n\t\t\t\t\t\t\tbigWorld = world\n\t\t\t\t\t\t\tbigStar = *star\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif techMax > popMax {\n\t\t\t\t\t\ttechMax += 1\n\t\t\t\t\t}\n\t\t\t\t\tif bigWorld.popBase > popMax {\n\t\t\t\t\t\ttechMax += 1\n\t\t\t\t\t}\n\t\t\t\t\tif bigStar.pixels > 0 {\n\t\t\t\t\t\ttechMax += 1\n\t\t\t\t\t}\n\t\t\t\t\tif techWorld.popBase > popMax {\n\t\t\t\t\t\ttechMax += 1\n\t\t\t\t\t}\n\t\t\t\t\tif techStar.pixels > 0 {\n\t\t\t\t\t\ttechMax += 1\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t\/\/ fastest case\n\t\t\tfor id, lin := range lines {\n\t\t\t\tthickness := float32(0.00010)\n\t\t\t\tif lin.jumpInfo.color.A < math.MaxUint8-47 {\n\t\t\t\t\tthickness = 0.00012\n\t\t\t\t} else if lin.jumpInfo.color.A < math.MaxUint8-39 {\n\t\t\t\t\tthickness = 0.00015\n\t\t\t\t}\n\t\t\t\tif lin.jumpInfo.s1ID != lin.jumpInfo.s2ID {\n\t\t\t\t\tlin.lines = gi3d.AddNewLines(sc, \"Lines-\"+strconv.Itoa(lin.jumpInfo.s1ID)+\"-\"+strconv.Itoa(lin.jumpInfo.s2ID),\n\t\t\t\t\t\t[]mat32.Vec3{\n\t\t\t\t\t\t\t{X: lin.from.x + 
offsets.x, Y: lin.from.y + offsets.y, Z: lin.from.z + offsets.z},\n\t\t\t\t\t\t\t{X: lin.to.x + offsets.x, Y: lin.to.y + offsets.y, Z: lin.to.z + offsets.z},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tmat32.Vec2{X: thickness, Y: thickness},\n\t\t\t\t\t\tgi3d.OpenLines,\n\t\t\t\t\t)\n\t\t\t\t\tsolidLine := gi3d.AddNewSolid(sc, sc, \"Lines-\"+strconv.Itoa(id), lin.lines.Name())\n\t\t\t\t\t\/\/ solidLine.Pose.Pos.Set(lin.from.x - .5, lin.from.y - .5, lin.from.z + 8)\n\t\t\t\t\t\/\/ lns.Mat.Color.SetUInt8(255, 255, 0, 128)\n\t\t\t\t\tsolidLine.Mat.Color = lin.jumpInfo.color\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc checkForJumps(stars []*star, star *star, id int) (result []*simpleLine) {\n\tresult = make([]*simpleLine, 0)\n\tfor innerId, innerStar := range stars {\n\t\tif innerId == id {\n\t\t\tcontinue\n\t\t}\n\t\tjumpColor := checkFor1jump(star, innerStar)\n\t\tif jumpColor.color.A > 0 {\n\t\t\t\/\/ symmetric, so no copies\n\t\t\tresult = addIfNew(result, jumpColor)\n\t\t}\n\t}\n\tclosest := []*simpleLine{&noLine, &noLine, &noLine}\n\tif len(result) > 3 {\n\t\tfor _, nextSimpleLine := range result {\n\t\t\tif nextSimpleLine.jumpInfo.distance < closest[0].jumpInfo.distance {\n\t\t\t\tclosest[2] = closest[1]\n\t\t\t\tclosest[1] = closest[0]\n\t\t\t\tclosest[0] = nextSimpleLine\n\t\t\t} else if nextSimpleLine.jumpInfo.distance < closest[1].jumpInfo.distance {\n\t\t\t\tclosest[2] = closest[1]\n\t\t\t\tclosest[1] = nextSimpleLine\n\t\t\t} else if nextSimpleLine.jumpInfo.distance < closest[2].jumpInfo.distance {\n\t\t\t\tclosest[2] = nextSimpleLine\n\t\t\t}\n\t\t}\n\t\tresult = closest\n\t}\n\n\treturn\n}\n\nfunc checkFor1jump(s1 *star, s2 *star) (result *jump) {\n\tjumpLength := distance(s1, s2) * 100 * parsecsPerLightYear\n\tdelta := int(jumpLength)\n\tif delta < len(jumpColors) {\n\t\tif s1.id != s2.id {\n\t\t\tresult = &jump{jumpColors[delta], jumpColors[delta], delta, jumpLength, s1.id, s2.id}\n\t\t} else {\n\t\t\tresult = &noJump\n\t\t}\n\t} else {\n\t\tresult = &noJump\n\t}\n\t\/\/ Return transparent black if there isn't one\n\n\treturn\n}\n\nfunc addVisits(base []*jump, addition []*jump) (result []*jump) {\n\tresult = base\n\tfor _, nextJump := range addition {\n\t\talready := false\n\t\tfor _, baseJump := range base {\n\t\t\tif (baseJump.s1ID == nextJump.s1ID &&\n\t\t\t\tbaseJump.s2ID == nextJump.s2ID) ||\n\t\t\t\t(baseJump.s1ID == nextJump.s2ID &&\n\t\t\t\t\tbaseJump.s2ID == nextJump.s1ID) {\n\t\t\t\talready = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !already {\n\t\t\tresult = append(result, nextJump)\n\t\t}\n\t}\n\n\treturn\n}\n\nfunc subtractVisits(base []*jump, subtraction []*jump) (result []*jump) {\n\tresult = make([]*jump, 0)\n\tfor _, baseJump := range base {\n\t\tif baseJump.s1ID != baseJump.s2ID {\n\t\t\tadd := true\n\t\t\tfor _, nextJump := range subtraction {\n\t\t\t\tif nextJump.s1ID != nextJump.s2ID {\n\t\t\t\t\tif (baseJump.s1ID == nextJump.s1ID &&\n\t\t\t\t\t\tbaseJump.s2ID == nextJump.s2ID) ||\n\t\t\t\t\t\t(baseJump.s1ID == nextJump.s2ID &&\n\t\t\t\t\t\t\tbaseJump.s2ID == nextJump.s1ID) {\n\t\t\t\t\t\tadd = false\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t\tif add {\n\t\t\t\tresult = append(result, baseJump)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}\n\nfunc nextVisits(base []*jump, visited []*jump) (result []*jump) {\n\tresult = make([]*jump, 0)\n\tfor _, start := range base {\n\t\tfor _, nextJump := range jumpsByStar[start.s1ID] {\n\t\t\tresult, _ = maybeAppend(result, nextJump)\n\t\t}\n\t\tfor _, nextJump := range 
jumpsByStar[start.s2ID] {\n\t\t\tresult, _ = maybeAppend(result, nextJump)\n\t\t}\n\t}\n\tresult = subtractVisits(result, visited)\n\treturn\n}\n\nfunc maybeAppend(soFar []*jump, nextJump *jump) (result []*jump, yesAppend bool) {\n\tyesAppend = true\n\tresult = soFar\n\tfor _, alreadyJumps := range soFar {\n\t\tif (nextJump.s1ID == alreadyJumps.s1ID && nextJump.s2ID == alreadyJumps.s2ID) ||\n\t\t\t(nextJump.s2ID == alreadyJumps.s1ID && nextJump.s1ID == alreadyJumps.s2ID) {\n\t\t\tyesAppend = false\n\t\t\tbreak\n\t\t}\n\t}\n\tif yesAppend {\n\t\tresult = append(result, nextJump)\n\t}\n\treturn\n}\n\nfunc traceJumps(id int) (visited []*jump) {\n\texplore := jumpsByStar[id]\n\tvisited = explore\n\tfor longest := 0; longest < 48; longest++ {\n\t\tif len(explore) == 0 {\n\t\t\tbreak\n\t\t}\n\t\texplore = nextVisits(explore, visited)\n\t\tvisited = addVisits(visited, explore)\n\t}\n\treturn visited\n}\n\nfunc showStar(star star, sc *gi3d.Scene) {\n\tstarSphere := gi3d.AddNewSolid(sc, sc, sName, sphereModel.Name())\n\tstarSphere.Pose.Pos.Set(star.x+offsets.x, star.y+offsets.y, star.z+offsets.z)\n\tstarSphere.Mat.Color.SetUInt8(star.brightColor.R, star.brightColor.G, star.brightColor.B, star.brightColor.A)\n}\n\nfunc showBigStar(star star, sc *gi3d.Scene) {\n\tstarSphere := gi3d.AddNewSolid(sc, sc, sName, sphereModel.Name())\n\tstarSphere.Pose.Pos.Set(star.x+offsets.x, star.y+offsets.y, star.z+offsets.z)\n\tstarSphere.Mat.Color.SetUInt8(star.brightColor.R, star.brightColor.G, star.brightColor.B, star.brightColor.A)\n}\n\nfunc addIfNew(soFar []*simpleLine, jump *jump) (result []*simpleLine) {\n\tresult = soFar\n\tif jump.s1ID >= jump.s2ID {\n\t\treturn\n\t}\n\talready := false\n\tfor _, line := range result {\n\t\tif (line.jumpInfo.s1ID == jump.s1ID &&\n\t\t\tline.jumpInfo.s2ID == jump.s2ID) ||\n\t\t\t(line.jumpInfo.s1ID == jump.s2ID &&\n\t\t\t\tline.jumpInfo.s2ID == jump.s1ID) {\n\t\t\talready = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !already {\n\t\tnextLine := &simpleLine{\n\t\t\tfrom: position{x: stars[jump.s1ID].x, y: stars[jump.s1ID].y, z: stars[jump.s1ID].z},\n\t\t\tto: position{x: stars[jump.s2ID].x, y: stars[jump.s2ID].y, z: stars[jump.s2ID].z},\n\t\t\tjumpInfo: jump,\n\t\t}\n\t\tresult = append(result, nextLine)\n\t}\n\treturn\n}\n\nfunc contains(soFar []int, next int) (yes bool) {\n\tyes = false\n\tfor _, sID := range soFar {\n\t\tif sID == next {\n\t\t\tyes = true\n\t\t\tbreak\n\t\t}\n\t}\n\treturn\n}","avg_line_length":27.2352024922,"max_line_length":130,"alphanum_fraction":0.6189877037} +{"size":2391,"ext":"go","lang":"Go","max_stars_count":7.0,"content":"package rbac\n\nimport \"github.com\/pkg\/errors\"\n\ntype roleCache struct {\n\t\/\/ using model as cache model, and then can be replaced it\n\troles model\n}\n\nfunc newRoleCache() (*roleCache, error) {\n\tmodel, err := newModel()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &roleCache{roles: model}, nil\n}\n\n\/\/ Insert adds a role to the cache.\nfunc (cache *roleCache) Insert(role *Role) error {\n\t\/\/ The role name comes at the end of the arguments and is easy to find role by name\n\treturn cache.add(\"g\", \"g\", role.Description, role.Name)\n}\n\nfunc (cache *roleCache) add(sec string, ptype string, params ...string) error {\n\tif cache.roles.Add(sec, ptype, params) {\n\t\treturn nil\n\t}\n\n\treturn errors.Errorf(\"failed to add %s %s %v\", sec, ptype, params)\n}\n\n\/\/ Delete deletes a role in the cache.\nfunc (cache *roleCache) Delete(role string) error {\n\treturn cache.removeFiltered(\"g\", \"g\", 1, 
role)\n}\n\nfunc (cache *roleCache) removeFiltered(sec string, ptype string, fieldIndex int, fieldValues ...string) error {\n\tcache.roles.RemoveFilteredRule(\"g\", \"g\", fieldIndex, fieldValues...)\n\n\t\/\/ role is not exist and return nil\n\treturn nil\n}\n\n\/\/ Exist get a role from the cache if it exist.\nfunc (cache *roleCache) Exist(role string) (bool, error) {\n\troles := cache.getFiltered(\"g\", \"g\", 1, role)\n\treturn len(roles) > 0, nil\n}\n\n\/\/ Get gets a role by name from the cache.\nfunc (cache *roleCache) Get(name string) (*Role, error) {\n\troles := cache.getFiltered(\"g\", \"g\", 1, name)\n\tif len(roles) == 0 {\n\t\treturn nil, errors.New(\"role not exist\")\n\t}\n\n\treturn roles[0], nil\n}\n\nfunc (cache *roleCache) getFiltered(sec string, ptype string, fieldIndex int, fieldValues ...string) []*Role {\n\tvar roles []*Role\n\trules := cache.roles.GetFilteredRule(sec, ptype, fieldIndex, fieldValues...)\n\tfor _, rule := range rules {\n\t\tif len(rule) >= 2 {\n\t\t\trole := &Role{Name: rule[1], Description: rule[0]}\n\t\t\troles = append(roles, role)\n\t\t}\n\t}\n\n\treturn roles\n}\n\n\/\/ GetAll gets roles from the cache.\nfunc (cache *roleCache) GetAll() ([]*Role, error) {\n\tvar roles []*Role\n\trules := cache.roles.GetRule(\"g\", \"g\")\n\tfor _, rule := range rules {\n\t\tif len(rule) >= 2 {\n\t\t\trole := &Role{Name: rule[1], Description: rule[0]}\n\t\t\troles = append(roles, role)\n\t\t}\n\t}\n\n\treturn roles, nil\n}\n\n\/\/ Count gets total member of role from the cache.\nfunc (cache *roleCache) Count() (int64, error) {\n\tcount := cache.roles.Count(\"g\", \"g\")\n\treturn int64(count), nil\n}\n","avg_line_length":25.7096774194,"max_line_length":111,"alphanum_fraction":0.6712672522} +{"size":2675,"ext":"go","lang":"Go","max_stars_count":null,"content":"package pg\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"strings\"\n\n\t\"github.com\/enix\/wal-g\/pkg\/databases\/postgres\"\n\n\t\"github.com\/enix\/wal-g\/internal\"\n\t\"github.com\/pkg\/errors\"\n\t\"github.com\/spf13\/cobra\"\n\t\"github.com\/wal-g\/tracelog\"\n)\n\nconst (\n\tWalVerifyUsage = \"wal-verify\"\n\tWalVerifyShortDescription = \"Verify WAL storage folder. 
Available checks: integrity, timeline.\"\n\tWalVerifyLongDescription = \"Run a set of specified checks to ensure WAL storage health.\"\n\n\tuseJSONOutputFlag = \"json\"\n\tuseJSONOutputDescription = \"Show output in JSON format.\"\n\n\tcheckIntegrityArg = \"integrity\"\n\tcheckTimelineArg = \"timeline\"\n)\n\nvar (\n\tavailableChecks = map[string]postgres.WalVerifyCheckType{\n\t\tcheckIntegrityArg: postgres.WalVerifyIntegrityCheck,\n\t\tcheckTimelineArg: postgres.WalVerifyTimelineCheck,\n\t}\n\t\/\/ walVerifyCmd represents the walVerify command\n\twalVerifyCmd = &cobra.Command{\n\t\tUse: WalVerifyUsage,\n\t\tShort: WalVerifyShortDescription,\n\t\tLong: WalVerifyLongDescription,\n\t\tArgs: checkArgs,\n\t\tRun: func(cmd *cobra.Command, checks []string) {\n\t\t\tfolder, err := internal.ConfigureFolder()\n\t\t\ttracelog.ErrorLogger.FatalOnError(err)\n\t\t\toutputType := postgres.WalVerifyTableOutput\n\t\t\tif useJSONOutput {\n\t\t\t\toutputType = postgres.WalVerifyJSONOutput\n\t\t\t}\n\t\t\toutputWriter := postgres.NewWalVerifyOutputWriter(outputType, os.Stdout)\n\t\t\tcheckTypes := parseChecks(checks)\n\n\t\t\tpostgres.HandleWalVerify(checkTypes, folder, postgres.QueryCurrentWalSegment(), outputWriter)\n\t\t},\n\t}\n\tuseJSONOutput bool\n)\n\nfunc parseChecks(checks []string) []postgres.WalVerifyCheckType {\n\t\/\/ filter the possible duplicates\n\tuniqueChecks := make(map[string]bool)\n\tfor _, check := range checks {\n\t\tuniqueChecks[check] = true\n\t}\n\n\tcheckTypes := make([]postgres.WalVerifyCheckType, 0, len(checks))\n\tfor check := range uniqueChecks {\n\t\tcheckType, ok := availableChecks[check]\n\t\tif !ok {\n\t\t\ttracelog.ErrorLogger.Fatalf(\"Check %s is not available.\", check)\n\t\t}\n\t\tcheckTypes = append(checkTypes, checkType)\n\t}\n\treturn checkTypes\n}\n\nfunc checkArgs(cmd *cobra.Command, args []string) error {\n\tif len(args) < 1 {\n\t\tavailableCheckCommands := make([]string, 0, len(availableChecks))\n\t\tfor cmdName := range availableChecks {\n\t\t\tavailableCheckCommands = append(availableCheckCommands, cmdName)\n\t\t}\n\t\treturn errors.New(\"at least one of the following checks should be specified: \" +\n\t\t\tstrings.Join(availableCheckCommands, \", \"))\n\t}\n\tfor _, arg := range args {\n\t\tif _, ok := availableChecks[arg]; !ok {\n\t\t\treturn fmt.Errorf(\"invalid check specified: %s\", arg)\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc init() {\n\tCmd.AddCommand(walVerifyCmd)\n\twalVerifyCmd.Flags().BoolVar(&useJSONOutput, useJSONOutputFlag, false, useJSONOutputDescription)\n}\n","avg_line_length":28.4574468085,"max_line_length":97,"alphanum_fraction":0.7375700935} +{"size":181405,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/+build windows\n\npackage main\n\nvar icon []byte = []byte{\n\t0x00, 0x00, 0x01, 0x00, 0x05, 0x00, 0x10, 0x10, 0x00, 0x00, 0x01, 0x00,\n\t0x20, 0x00, 0x68, 0x04, 0x00, 0x00, 0x56, 0x00, 0x00, 0x00, 0x18, 0x18,\n\t0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0x88, 0x09, 0x00, 0x00, 0xbe, 0x04,\n\t0x00, 0x00, 0x20, 0x20, 0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0xa8, 0x10,\n\t0x00, 0x00, 0x46, 0x0e, 0x00, 0x00, 0x30, 0x30, 0x00, 0x00, 0x01, 0x00,\n\t0x20, 0x00, 0xa8, 0x25, 0x00, 0x00, 0xee, 0x1e, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0xdc, 0x2f, 0x00, 0x00, 0x96, 0x44,\n\t0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x20, 0x00,\n\t0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xff, 0xf2, 0xcf, 0x00, 0xf6, 0xd9, 0xc9, 0x00, 0xfc, 0xde,\n\t0xc9, 0x0b, 0xe7, 0xd9, 0xcc, 0x42, 0xe2, 0xd7, 0xcc, 0x4f, 0xf0, 0xd8,\n\t0xca, 0x13, 0xd2, 0xd1, 0xcb, 0x00, 0xfe, 0xdf, 0xca, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xf1, 0xde, 0xd0, 0x00, 0xff, 0xf4, 0xd3, 0x00, 0xe8, 0xd9,\n\t0xcc, 0x38, 0xbe, 0xd3, 0xd4, 0xb0, 0x82, 0xcd, 0xe3, 0xf3, 0x90, 0xd7,\n\t0xe7, 0xf9, 0xc1, 0xd8, 0xd8, 0xc5, 0xe0, 0xd7, 0xcd, 0x50, 0xf5, 0xdb,\n\t0xcc, 0x04, 0xeb, 0xd9, 0xcd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe5, 0xdc, 0xd3, 0x00, 0x39, 0x9f, 0xb9, 0x00, 0xe1, 0xd8,\n\t0xcd, 0x50, 0x9e, 0xcf, 0xdc, 0xe1, 0x3b, 0xc4, 0xf3, 0xff, 0x15, 0xc1,\n\t0xfe, 0xff, 0x4a, 0xd8, 0xff, 0xff, 0x6b, 0xdd, 0xf8, 0xff, 0xa6, 0xd9,\n\t0xe2, 0xf1, 0xd9, 0xd7, 0xcf, 0x73, 0xf5, 0xdb, 0xcc, 0x05, 0xe4, 0xda,\n\t0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xeb, 0xe7, 0xe2, 0x00, 0xdf, 0xd7, 0xcd, 0x00, 0xe9, 0xd9,\n\t0xcc, 0x35, 0xa0, 0xcf, 0xdb, 0xdf, 0x27, 0xc2, 0xf8, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x5e, 0xde, 0xfc, 0xff, 0xa3, 0xda, 0xe2, 0xf2, 0xde, 0xd7,\n\t0xce, 0x59, 0xd0, 0xd3, 0xcc, 0x00, 0xe4, 0xde, 0xd6, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe1, 0xda, 0xd2, 0x00, 0xff, 0xe1,\n\t0xc8, 0x06, 0xc4, 0xd4, 0xd3, 0xa5, 0x40, 0xc4, 0xf2, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x67, 0xdd,\n\t0xf9, 0xff, 0xbd, 0xd8, 0xd9, 0xcc, 0xef, 0xd8, 0xca, 0x18, 0xe0, 0xd9,\n\t0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xd8,\n\t0xce, 0x00, 0xef, 0xda, 0xcb, 0x2f, 0x8d, 0xcd, 0xe0, 0xe9, 0x13, 0xbf,\n\t0xfd, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x15, 0xc1,\n\t0xfe, 0xff, 0x4b, 0xd8, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x93, 0xda, 0xe8, 0xfb, 0xe0, 0xd7,\n\t0xcd, 0x57, 0xda, 0xd6, 0xcd, 0x00, 0xef, 0xeb, 0xe7, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xd7, 0xd5, 0xcd, 0x00, 0xdd, 0xd7, 0xce, 0x61, 0x5e, 0xc8,\n\t0xeb, 0xfe, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x76, 0xdc,\n\t0xf3, 0xff, 0xd0, 0xd8, 0xd2, 0x91, 0x67, 0xcc, 0xe1, 0x00, 0xe6, 0xe1,\n\t0xda, 0x00, 0xf0, 0xec, 0xe8, 0x00, 0xaf, 0xcd, 0xd2, 0x00, 0xce, 0xd5,\n\t0xd1, 0x84, 0x44, 0xc5, 0xf1, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x69, 0xdd, 0xf9, 0xff, 0xc3, 0xd8, 0xd7, 0xb3, 
0xff, 0xda,\n\t0xc0, 0x05, 0xe2, 0xdd, 0xd4, 0x00, 0xec, 0xe8, 0xe2, 0x00, 0x42, 0xb8,\n\t0xde, 0x00, 0xc7, 0xd4, 0xd3, 0x94, 0x3b, 0xc4, 0xf4, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x15, 0xc1,\n\t0xfe, 0xff, 0x4b, 0xd8, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x64, 0xdd, 0xfb, 0xff, 0xbe, 0xd8,\n\t0xd9, 0xc1, 0xff, 0xda, 0xc5, 0x0a, 0xe2, 0xdc, 0xd4, 0x00, 0xec, 0xe9,\n\t0xe3, 0x00, 0x67, 0xc0, 0xda, 0x00, 0xc8, 0xd4, 0xd2, 0x91, 0x3c, 0xc4,\n\t0xf3, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x65, 0xdd,\n\t0xfa, 0xff, 0xbf, 0xd8, 0xd9, 0xbf, 0xff, 0xd9, 0xc4, 0x09, 0xe2, 0xdc,\n\t0xd3, 0x00, 0xf2, 0xf0, 0xec, 0x00, 0xbb, 0xd0, 0xd0, 0x00, 0xd0, 0xd5,\n\t0xd1, 0x7f, 0x46, 0xc5, 0xf1, 0xff, 0x0c, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x15, 0xc1, 0xfe, 0xff, 0x4b, 0xd8,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x69, 0xdd, 0xf8, 0xff, 0xc5, 0xd8, 0xd6, 0xaf, 0xff, 0xdb,\n\t0xbd, 0x04, 0xe3, 0xdd, 0xd5, 0x00, 0x00, 0x00, 0x00, 0x00, 0xd8, 0xd5,\n\t0xcd, 0x00, 0xdd, 0xd7, 0xce, 0x61, 0x72, 0xca, 0xe6, 0xff, 0x1d, 0xc0,\n\t0xfb, 0xff, 0x11, 0xbf, 0xfe, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x12, 0xc1,\n\t0xff, 0xff, 0x49, 0xd8, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x5f, 0xde, 0xfd, 0xff, 0x85, 0xdb, 0xee, 0xff, 0xd1, 0xd8,\n\t0xd2, 0x92, 0x7e, 0xcd, 0xdc, 0x00, 0xe7, 0xe3, 0xdc, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdf, 0xd8, 0xce, 0x00, 0xe2, 0xd9, 0xce, 0x1e, 0xcd, 0xd5,\n\t0xd1, 0x8e, 0xa8, 0xd0, 0xda, 0xcc, 0x87, 0xcd, 0xe1, 0xeb, 0x6f, 0xca,\n\t0xe7, 0xf9, 0x63, 0xc9, 0xeb, 0xfe, 0x82, 0xd7, 0xec, 0xff, 0x93, 0xdb,\n\t0xe9, 0xfb, 0xa1, 0xda, 0xe3, 0xef, 0xb5, 0xd9, 0xdc, 0xd5, 0xcd, 0xd8,\n\t0xd3, 0xa0, 0xdf, 0xd8, 0xce, 0x34, 0xdb, 0xd6, 0xcc, 0x00, 0xed, 0xe9,\n\t0xe5, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xe6,\n\t0xc3, 0x00, 0xff, 0xea, 0xc0, 0x01, 0xfe, 0xde, 0xc7, 0x11, 0xf2, 0xda,\n\t0xca, 0x2d, 0xe6, 0xd8, 0xcc, 0x48, 0xdf, 0xd7, 0xcd, 0x5c, 0xde, 0xd7,\n\t0xce, 0x5f, 0xe2, 0xd7, 0xcc, 0x4e, 0xe9, 0xd7, 0xcb, 0x34, 0xf4, 0xd8,\n\t0xc8, 0x17, 0xff, 0xdb, 0xc3, 0x03, 0xfe, 0xd9, 0xc7, 0x00, 0xff, 0xff,\n\t0x5c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0x00, 0x00, 0xfc, 0x3f, 0x00, 0x00, 0xf8, 0x0f, 0x00, 0x00, 0xf0, 0x07,\n\t0x00, 0x00, 0xe0, 0x07, 0x00, 0x00, 0xc0, 0x03, 0x00, 0x00, 0xc0, 0x03,\n\t0x00, 0x00, 0xc0, 0x03, 0x00, 0x00, 0xc0, 0x01, 0x00, 0x00, 0xc0, 0x01,\n\t0x00, 0x00, 0xc0, 0x01, 0x00, 0x00, 0xc0, 0x01, 0x00, 0x00, 0xc0, 0x03,\n\t0x00, 0x00, 0xc0, 0x03, 0x00, 0x00, 0xe0, 0x07, 0x00, 0x00, 0xff, 0xff,\n\t0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x30, 0x00,\n\t0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xff, 0xff, 0xff, 0x00, 0xff, 0xff, 0xff, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe6, 0xe0, 0xd8, 0x00, 0xe5, 0xe4, 0xdf, 0x01, 0xe6, 0xdb,\n\t0xcf, 0x22, 0xe5, 0xd8, 0xcd, 0x6d, 0xe3, 0xd8, 0xcc, 0x82, 0xe4, 0xd9,\n\t0xce, 0x38, 0xe4, 0xdd, 0xd5, 0x06, 0xe3, 0xdb, 0xd1, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xec, 0xe8, 0xe3, 0x00, 0xe2, 0xd6,\n\t0xca, 0x00, 0xe4, 0xdb, 0xd1, 0x17, 0xe4, 0xd8, 0xcd, 0x7c, 0xd0, 0xd5,\n\t0xd0, 0xdb, 0xa0, 0xd0, 0xdc, 0xfd, 0xa3, 0xd6, 0xdf, 0xff, 0xcc, 0xd7,\n\t0xd3, 0xed, 0xe0, 0xd7, 0xcd, 0x9f, 0xe3, 0xd9, 0xcf, 0x30, 0xe7, 0xeb,\n\t0xe9, 0x01, 0xe5, 0xdf, 0xd7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe5, 0xe0, 0xd8, 0x00, 0xe1, 0xcb, 0xba, 0x00, 0xe4, 0xd9,\n\t0xcf, 0x36, 0xdb, 0xd7, 0xce, 0xbf, 0xa3, 0xd0, 0xda, 0xfd, 0x48, 0xc5,\n\t0xf0, 0xff, 0x1a, 0xc1, 0xfd, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x6f, 0xdd,\n\t0xf7, 0xff, 0xa3, 0xd9, 0xe2, 0xff, 0xd4, 0xd7, 0xd0, 0xe0, 0xe2, 0xd8,\n\t0xcd, 0x61, 0xe3, 0xdf, 0xd7, 0x05, 0xe2, 0xdc, 0xd3, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe8, 0xe3,\n\t0xdc, 0x00, 0xde, 0xd2, 0xc5, 0x00, 0xe3, 0xd9, 0xce, 0x3c, 0xd4, 0xd6,\n\t0xcf, 0xd7, 0x79, 0xcb, 0xe4, 0xff, 0x1c, 0xc0, 0xfb, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x13, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x59, 0xdf,\n\t0xff, 0xff, 0x5a, 0xde, 0xfe, 0xff, 0x82, 0xdb, 0xef, 0xff, 0xca, 0xd7,\n\t0xd4, 0xf2, 0xe2, 0xd8, 0xcd, 0x71, 0xe4, 0xe1, 0xda, 0x03, 0xe2, 0xdc,\n\t0xd4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf1, 0xee, 0xeb, 0x00, 0xdf, 0xd8,\n\t0xcf, 0x00, 0xe4, 0xda, 0xd0, 0x25, 0xd8, 0xd6, 0xce, 0xce, 0x71, 0xca,\n\t0xe6, 0xff, 0x12, 0xbf, 0xfd, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x7a, 0xdc,\n\t0xf2, 0xff, 0xcc, 0xd7, 0xd3, 0xef, 0xe2, 0xd8, 0xcd, 0x56, 0xda, 0xcf,\n\t0xc3, 0x00, 0xe6, 0xe1, 0xda, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe3, 0xdd, 0xd5, 0x00, 0xe6, 0xe0,\n\t0xd8, 0x05, 0xe1, 0xd8, 0xcd, 0x99, 0x91, 0xcd, 0xdf, 0xff, 0x16, 0xc0,\n\t0xfc, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x89, 0xdb, 0xec, 0xff, 0xd8, 0xd7, 0xcf, 0xd2, 0xe3, 0xda,\n\t0xd0, 0x21, 0xe0, 0xd9, 0xd0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0xde, 0xd8, 0xce, 0x00, 0xe6, 0xda,\n\t0xce, 0x3c, 0xc4, 0xd3, 0xd3, 0xed, 0x33, 0xc3, 0xf5, 0xff, 0x0e, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x5d, 0xde, 0xfd, 0xff, 0xb2, 0xd8, 0xdd, 0xff, 0xe2, 0xd7,\n\t0xcd, 0x7b, 0xd1, 0xc6, 0xb8, 0x00, 0xe8, 0xe3, 0xdc, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe7, 0xe2, 0xdc, 0x00, 0x96, 0x81, 0x62, 0x00, 0xe3, 0xd8,\n\t0xcd, 0x8d, 0x87, 0xcc, 0xe1, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x81, 0xdb, 0xef, 0xff, 0xd8, 0xd7,\n\t0xcf, 0xcc, 0xe5, 0xdb, 0xd0, 0x14, 0xe1, 0xdb, 0xd2, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe1, 0xdb, 0xd3, 0x00, 0xe7, 0xdc, 0xd1, 0x12, 0xd6, 0xd6,\n\t0xcf, 0xcb, 0x4c, 0xc6, 0xef, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x64, 0xdd, 0xfb, 0xff, 0xc4, 0xd8,\n\t0xd6, 0xf4, 0xe5, 0xd8, 0xcd, 0x41, 0xde, 0xd7, 0xce, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdf, 0xd9, 0xcf, 0x00, 0xe9, 0xda, 0xce, 0x2f, 0xc0, 0xd3,\n\t0xd4, 0xeb, 0x29, 0xc2, 0xf8, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0xac, 0xd9,\n\t0xdf, 0xff, 0xe3, 0xd7, 0xcc, 0x6f, 0xdb, 0xd4, 0xc9, 0x00, 0xfa, 
0xf9,\n\t0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xde, 0xd7, 0xcd, 0x00, 0xe9, 0xd9, 0xcc, 0x4b, 0xab, 0xd1,\n\t0xd8, 0xf9, 0x19, 0xc0, 0xfc, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x9a, 0xda,\n\t0xe6, 0xff, 0xe1, 0xd7, 0xcd, 0x92, 0xcb, 0xc1, 0xb2, 0x00, 0xed, 0xe9,\n\t0xe3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdd, 0xd6, 0xcc, 0x00, 0xe8, 0xd9, 0xcc, 0x5f, 0x9c, 0xcf,\n\t0xdc, 0xfe, 0x13, 0xbf, 0xfd, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x8e, 0xdb,\n\t0xea, 0xff, 0xde, 0xd7, 0xcd, 0xa6, 0xf3, 0xec, 0xe6, 0x02, 0xe8, 0xe3,\n\t0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdc, 0xd5, 0xca, 0x00, 0xe7, 0xd9, 0xcc, 0x69, 0x96, 0xce,\n\t0xdd, 0xff, 0x11, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x89, 0xdb,\n\t0xec, 0xff, 0xdd, 0xd7, 0xce, 0xb1, 0xed, 0xe5, 0xde, 0x05, 0xe8, 0xe3,\n\t0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdc, 0xd5, 0xcb, 0x00, 0xe7, 0xd9, 0xcc, 0x65, 0x98, 0xce,\n\t0xdd, 0xff, 0x12, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x8b, 0xdb,\n\t0xeb, 0xff, 0xdd, 0xd7, 0xce, 0xad, 0xee, 0xe6, 0xdf, 0x04, 0xe8, 0xe3,\n\t0xdc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdd, 0xd6, 0xcc, 0x00, 0xe8, 0xd9, 0xcc, 0x58, 0xa1, 0xcf,\n\t0xdb, 0xfd, 0x15, 0xc0, 0xfd, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x92, 0xda,\n\t0xe9, 0xff, 0xdf, 0xd7, 0xcd, 0xa0, 0xff, 0xff, 0xff, 0x01, 0xe9, 0xe5,\n\t0xde, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xde, 0xd7, 0xce, 0x00, 0xe9, 0xda, 0xcd, 0x45, 0xaf, 0xd1,\n\t0xd7, 0xf7, 0x1b, 0xc0, 0xfb, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x9d, 0xda,\n\t0xe4, 0xff, 0xe1, 
0xd7, 0xcc, 0x8b, 0xd4, 0xcb, 0xbf, 0x00, 0xf0, 0xed,\n\t0xe9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdf, 0xd9, 0xcf, 0x00, 0xe9, 0xda, 0xce, 0x2d, 0xc1, 0xd3,\n\t0xd3, 0xea, 0x2a, 0xc2, 0xf8, 0xff, 0x0b, 0xbe, 0xff, 0xff, 0x0e, 0xbf,\n\t0xff, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x14, 0xc1, 0xfe, 0xff, 0x49, 0xd7, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0xad, 0xd9,\n\t0xde, 0xff, 0xe4, 0xd7, 0xcc, 0x6d, 0xdc, 0xd4, 0xca, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe1, 0xdb, 0xd2, 0x00, 0xe5, 0xdb, 0xd1, 0x15, 0xd9, 0xd6,\n\t0xce, 0xc8, 0x9d, 0xcf, 0xdc, 0xff, 0x5f, 0xc8, 0xeb, 0xff, 0x37, 0xc3,\n\t0xf4, 0xff, 0x20, 0xc1, 0xfa, 0xff, 0x14, 0xbf, 0xfd, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x11, 0xc0, 0xff, 0xff, 0x47, 0xd7, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x5a, 0xde, 0xfe, 0xff, 0x60, 0xdd, 0xfc, 0xff, 0x6b, 0xdd,\n\t0xf8, 0xff, 0x82, 0xdb, 0xef, 0xff, 0xa3, 0xd9, 0xe2, 0xff, 0xd1, 0xd7,\n\t0xd1, 0xee, 0xe2, 0xd8, 0xce, 0x44, 0xde, 0xd7, 0xcd, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe5, 0xdf, 0xd7, 0x00, 0xe4, 0xe3, 0xde, 0x01, 0xe1, 0xda,\n\t0xd0, 0x2c, 0xe4, 0xd8, 0xcd, 0x76, 0xdc, 0xd7, 0xce, 0xb4, 0xcb, 0xd4,\n\t0xd1, 0xdf, 0xb6, 0xd2, 0xd6, 0xf4, 0xa0, 0xcf, 0xdb, 0xfd, 0x8d, 0xcd,\n\t0xe0, 0xff, 0x81, 0xcc, 0xe3, 0xff, 0x96, 0xd6, 0xe4, 0xff, 0xa5, 0xd9,\n\t0xe2, 0xff, 0xb0, 0xd9, 0xdd, 0xff, 0xbe, 0xd8, 0xd8, 0xf8, 0xcd, 0xd7,\n\t0xd3, 0xe7, 0xda, 0xd7, 0xce, 0xc5, 0xe1, 0xd7, 0xcc, 0x8c, 0xe2, 0xd8,\n\t0xce, 0x43, 0xe1, 0xdc, 0xd4, 0x08, 0xe1, 0xdb, 0xd2, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf3, 0xef,\n\t0xea, 0x00, 0xd6, 0xbe, 0xa9, 0x00, 0xe8, 0xdf, 0xd5, 0x08, 0xe8, 0xdb,\n\t0xcf, 0x20, 0xe9, 0xda, 0xcd, 0x3e, 0xe8, 0xd9, 0xcc, 0x5c, 0xe6, 0xd9,\n\t0xcc, 0x76, 0xe4, 0xd8, 0xcc, 0x87, 0xe2, 0xd8, 0xcd, 0x8a, 0xe3, 0xd7,\n\t0xcc, 0x7d, 0xe4, 0xd8, 0xcc, 0x66, 0xe5, 0xd8, 0xcd, 0x49, 0xe6, 0xd9,\n\t0xce, 0x29, 0xe6, 0xdc, 0xd2, 0x0e, 0xec, 0xe9, 0xe3, 0x01, 0xea, 0xe4,\n\t0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xff, 0x81, 0xff, 0x00, 0xff, 0x00, 0x7f, 0x00, 0xfe, 0x00,\n\t0x3f, 0x00, 0xfc, 0x00, 0x1f, 0x00, 0xf8, 0x00, 0x1f, 0x00, 0xf0, 0x00,\n\t0x0f, 0x00, 0xf0, 0x00, 0x0f, 0x00, 0xf0, 0x00, 0x07, 0x00, 0xe0, 0x00,\n\t0x07, 0x00, 0xe0, 0x00, 0x07, 0x00, 0xe0, 0x00, 0x07, 0x00, 0xe0, 0x00,\n\t0x03, 0x00, 0xe0, 0x00, 0x03, 0x00, 0xe0, 0x00, 0x03, 0x00, 0xe0, 0x00,\n\t0x03, 0x00, 0xe0, 0x00, 0x07, 0x00, 0xe0, 0x00, 0x07, 0x00, 0xe0, 0x00,\n\t0x07, 0x00, 0xe0, 0x00, 0x07, 0x00, 0xfc, 0x00, 0x1f, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xff, 0xff, 0xff, 0x00, 0x28, 0x00, 0x00, 0x00, 0x20, 0x00,\n\t0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe5, 0xe0,\n\t0xd8, 0x00, 0xe7, 0xe2, 0xdb, 0x03, 0xe6, 0xe1, 0xda, 0x07, 0xf9, 0xf7,\n\t0xf5, 0x00, 0xf2, 0xef, 0xea, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf5, 0xf3, 0xf0, 0x00, 0xdc, 0xd4,\n\t0xca, 0x00, 0xe4, 0xde, 0xd6, 0x0c, 0xe1, 0xda, 0xd0, 0x45, 0xe2, 0xd8,\n\t0xcd, 0x98, 0xe1, 0xd7, 0xcd, 0xb3, 0xe1, 0xd8, 0xcf, 0x6b, 0xe2, 0xdb,\n\t0xd3, 0x1f, 0xf0, 0xed, 0xe9, 0x01, 0xe8, 0xe3, 0xdc, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe3, 0xdd,\n\t0xd5, 0x00, 0xe5, 0xe0, 0xd9, 0x06, 0xe1, 0xda, 0xd0, 0x48, 0xe2, 0xd8,\n\t0xcd, 0xb3, 0xda, 0xd6, 0xcd, 0xf3, 0xb8, 0xd2, 0xd5, 0xff, 0xb3, 0xd5,\n\t0xda, 0xff, 0xd3, 0xd7, 0xd0, 0xfc, 0xdf, 0xd7, 0xcc, 0xd6, 0xe0, 0xd8,\n\t0xce, 0x75, 0xe2, 0xdc, 0xd3, 0x18, 0xd8, 0xd0, 0xc4, 0x00, 0xef, 0xeb,\n\t0xe7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0xed,\n\t0xe8, 0x00, 0xdd, 0xd6, 0xcc, 0x00, 0xe1, 0xdc, 0xd3, 0x1a, 0xe1, 0xd8,\n\t0xce, 0x8f, 0xdd, 0xd6, 0xcd, 0xf0, 0xb0, 0xd1, 0xd7, 0xff, 0x5d, 0xc8,\n\t0xeb, 0xff, 0x25, 0xc2, 0xfa, 0xff, 0x4a, 0xd6, 0xfd, 0xff, 0x76, 0xdc,\n\t0xf4, 0xff, 0xa8, 0xd9, 0xe0, 0xff, 0xd4, 0xd7, 0xcf, 0xfc, 0xe0, 0xd7,\n\t0xcd, 0xc4, 0xe0, 0xda, 0xd0, 0x45, 0xeb, 0xe8, 0xe3, 0x02, 0xe5, 0xe0,\n\t0xd9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xec, 0xe8, 0xe2, 0x00, 0xdc, 0xd5, 0xcb, 0x00, 0xe0, 0xda,\n\t0xd1, 0x2b, 0xe1, 0xd8, 0xcd, 0xbc, 0xcd, 0xd4, 0xd0, 0xfe, 0x73, 0xca,\n\t0xe6, 0xff, 0x21, 0xc1, 0xfa, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x11, 0xc0,\n\t0xfe, 0xff, 0x45, 0xd6, 0xff, 0xff, 0x58, 0xdf, 0xff, 0xff, 0x5a, 0xde,\n\t0xfe, 0xff, 0x7b, 0xdc, 0xf1, 0xff, 0xbd, 0xd8, 0xd8, 0xff, 0xde, 0xd6,\n\t0xcc, 0xe8, 0xe0, 0xd9, 0xcf, 0x66, 0xe6, 0xe2, 0xdb, 0x05, 0xe4, 0xde,\n\t0xd7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf2, 0xef, 0xec, 0x00, 0xdd, 0xd6,\n\t0xcc, 0x00, 0xe0, 0xda, 0xd1, 0x2b, 0xe1, 0xd7, 0xcc, 0xc7, 0xbf, 0xd3,\n\t0xd3, 0xff, 0x4b, 0xc6, 0xf0, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x65, 
0xdd, 0xfa, 0xff, 0xaa, 0xd9, 0xdf, 0xff, 0xdc, 0xd6,\n\t0xcd, 0xf2, 0xe0, 0xd9, 0xcf, 0x6c, 0xea, 0xe7, 0xe1, 0x02, 0xe4, 0xdf,\n\t0xd7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe1, 0xda, 0xd2, 0x00, 0xe2, 0xdc, 0xd4, 0x18, 0xe1, 0xd8,\n\t0xcd, 0xb9, 0xc0, 0xd3, 0xd3, 0xff, 0x40, 0xc4, 0xf2, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x60, 0xdd, 0xfc, 0xff, 0xa8, 0xd9, 0xe0, 0xff, 0xdd, 0xd6,\n\t0xcd, 0xec, 0xe0, 0xd9, 0xd0, 0x51, 0xd7, 0xce, 0xc2, 0x00, 0xe9, 0xe4,\n\t0xde, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe5, 0xe0, 0xd8, 0x00, 0xed, 0xeb,\n\t0xe6, 0x02, 0xe1, 0xd8, 0xce, 0x86, 0xd0, 0xd5, 0xcf, 0xfe, 0x50, 0xc6,\n\t0xee, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x62, 0xdd, 0xfb, 0xff, 0xb6, 0xd8, 0xdb, 0xff, 0xe0, 0xd7,\n\t0xcc, 0xd2, 0xe1, 0xdb, 0xd2, 0x24, 0xe0, 0xd9, 0xd0, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xde, 0xd7, 0xce, 0x00, 0xe1, 0xda, 0xd1, 0x39, 0xde, 0xd7,\n\t0xcc, 0xe8, 0x7e, 0xcb, 0xe3, 0xff, 0x11, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x73, 0xdc, 0xf4, 0xff, 0xcf, 0xd7, 0xd1, 0xff, 0xe0, 0xd8,\n\t0xce, 0x8c, 0xf3, 0xf1, 0xee, 0x02, 0xe6, 0xe0, 0xd9, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe6, 0xe1, 0xd9, 0x00, 0xed, 0xea,\n\t0xe5, 0x03, 0xe2, 0xd8, 0xcd, 0x99, 0xbe, 0xd3, 0xd4, 0xff, 0x2a, 0xc2,\n\t0xf8, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x9e, 0xd9, 0xe4, 0xff, 0xdf, 0xd7, 0xcc, 0xe2, 0xe1, 0xdb,\n\t0xd2, 0x2c, 0xe0, 0xd9, 0xd0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe0, 0xda, 0xd1, 0x00, 0xe1, 0xdb, 0xd2, 0x26, 0xe0, 0xd7,\n\t0xcc, 0xe0, 0x79, 0xcb, 0xe4, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x6f, 0xdc,\n\t0xf6, 0xff, 0xcf, 0xd7, 0xd1, 0xff, 0xe1, 0xd8, 0xce, 0x78, 0xd6, 0xce,\n\t0xc2, 0x00, 0xeb, 0xe7, 0xe1, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf2, 0xef, 0xeb, 0x00, 0xdc, 0xd4,\n\t0xca, 0x00, 0xe2, 0xd9, 0xcf, 0x62, 0xd0, 0xd5, 0xd0, 0xfd, 0x3c, 0xc4,\n\t0xf3, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5b, 0xde, 0xfe, 0xff, 0xb1, 0xd8,\n\t0xdd, 0xff, 0xe1, 0xd7, 0xcc, 0xbc, 0xe4, 0xdf, 0xd7, 0x0b, 0xe4, 0xde,\n\t0xd6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xea, 0xe6, 0xdf, 0x00, 0xff, 0xff, 0xff, 0x01, 0xe3, 0xd8,\n\t0xcd, 0x9b, 0xb0, 0xd1, 0xd7, 0xff, 0x1b, 0xc0, 0xfb, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x8f, 0xda, 0xea, 0xff, 0xde, 0xd6,\n\t0xcd, 0xe6, 0xe1, 0xda, 0xd1, 0x29, 0xe0, 0xda, 0xd1, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe4, 0xde,\n\t0xd6, 0x00, 0xe4, 0xde, 0xd6, 0x0e, 0xe2, 0xd7, 0xcc, 0xc5, 0x8c, 0xcd,\n\t0xe0, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x77, 0xdc, 0xf3, 0xff, 0xd6, 0xd7, 0xcf, 0xf9, 0xe1, 0xd9,\n\t0xcf, 0x4e, 0xde, 0xd7, 0xcd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe1, 0xdb, 0xd2, 0x00, 0xe1, 0xdb,\n\t0xd2, 0x1f, 0xdf, 0xd7, 0xcc, 0xde, 0x6d, 0xc9, 0xe7, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 
0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x69, 0xdd,\n\t0xf9, 0xff, 0xcd, 0xd7, 0xd2, 0xff, 0xe1, 0xd8, 0xce, 0x6f, 0xda, 0xd3,\n\t0xc8, 0x00, 0xfb, 0xfa, 0xf9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe0, 0xd9, 0xd0, 0x00, 0xe1, 0xda, 0xd1, 0x2f, 0xdc, 0xd6,\n\t0xcd, 0xec, 0x58, 0xc7, 0xec, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x62, 0xdd, 0xfc, 0xff, 0xc3, 0xd7,\n\t0xd6, 0xff, 0xe2, 0xd8, 0xcd, 0x87, 0xd3, 0xca, 0xbd, 0x00, 0xf4, 0xf1,\n\t0xee, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xd9,\n\t0xcf, 0x00, 0xe1, 0xda, 0xd1, 0x3b, 0xd9, 0xd6, 0xce, 0xf2, 0x4d, 0xc6,\n\t0xef, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x5e, 0xde, 0xfd, 0xff, 0xbd, 0xd8, 0xd8, 0xff, 0xe2, 0xd8,\n\t0xcd, 0x96, 0xaa, 0x98, 0x7f, 0x00, 0xf1, 0xef, 0xeb, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xd8, 0xcf, 0x00, 0xe2, 0xda,\n\t0xd0, 0x40, 0xd7, 0xd6, 0xce, 0xf5, 0x48, 0xc5, 0xf0, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5d, 0xde,\n\t0xfd, 0xff, 0xba, 0xd8, 0xd9, 0xff, 0xe2, 0xd8, 0xce, 0xa0, 0xff, 0xff,\n\t0xff, 0x01, 0xf1, 0xee, 0xe9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xdf, 0xd8, 0xcf, 0x00, 0xe2, 0xda, 0xd1, 0x3e, 0xd8, 0xd6,\n\t0xce, 0xf4, 0x4a, 0xc5, 0xf0, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5e, 0xde, 0xfd, 0xff, 0xbc, 0xd8,\n\t0xd9, 0xff, 0xe2, 0xd8, 0xcd, 0x9a, 0x00, 0x00, 0x00, 0x00, 0xf1, 0xee,\n\t0xea, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xd9,\n\t0xcf, 0x00, 0xe1, 0xda, 0xd0, 0x34, 0xda, 0xd6, 0xcd, 0xef, 0x52, 0xc6,\n\t0xee, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x60, 0xdd, 0xfc, 0xff, 0xc0, 0xd7, 0xd7, 0xff, 0xe2, 0xd8,\n\t0xcd, 0x8e, 0xcc, 0xc1, 0xb2, 0x00, 0xf3, 0xf0, 0xed, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0xda, 0xd1, 0x00, 0xe1, 0xdb,\n\t0xd2, 0x28, 0xdd, 0xd7, 0xcd, 0xe6, 0x60, 0xc8, 0xea, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x64, 0xdd,\n\t0xfb, 0xff, 0xc7, 0xd7, 0xd4, 0xff, 0xe2, 0xd8, 0xcd, 0x7d, 0xd8, 0xd0,\n\t0xc4, 0x00, 0xf6, 0xf3, 0xf1, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe2, 0xdc, 0xd3, 0x00, 0xe2, 0xdc, 0xd3, 0x19, 0xe0, 0xd7,\n\t0xcc, 0xd8, 0x74, 0xca, 0xe5, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x6c, 0xdd, 0xf8, 0xff, 0xcf, 0xd7,\n\t0xd1, 0xff, 0xe1, 0xd8, 0xce, 0x66, 0xdc, 0xd4, 0xca, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe4, 0xdf,\n\t0xd7, 0x00, 0xe4, 0xdf, 0xd7, 0x0c, 0xe2, 0xd8, 0xcc, 0xc1, 0x8d, 0xcd,\n\t0xdf, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x12, 0xc0,\n\t0xfe, 0xff, 0x46, 0xd6, 0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x56, 0xde,\n\t0xff, 0xff, 0x77, 0xdc, 0xf3, 0xff, 0xd7, 0xd7, 0xcf, 0xf8, 0xe1, 0xd9,\n\t0xcf, 0x4a, 0xde, 0xd7, 0xcd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xea, 0xe6, 0xe0, 0x00, 0xfa, 0xf9,\n\t0xf7, 0x02, 0xe2, 0xd8, 0xcd, 0xa1, 0xb9, 0xd2, 0xd5, 0xff, 0x45, 0xc5,\n\t0xf1, 0xff, 0x1d, 0xc0, 0xfb, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x0e, 0xbf, 
0xfe, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x12, 0xc0, 0xfe, 0xff, 0x46, 0xd6,\n\t0xff, 0xff, 0x5a, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x5c, 0xde, 0xfe, 0xff, 0x6a, 0xdd, 0xf8, 0xff, 0xa0, 0xd9,\n\t0xe3, 0xff, 0xdd, 0xd6, 0xcd, 0xea, 0xe1, 0xda, 0xd1, 0x2d, 0xe0, 0xd9,\n\t0xd0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xf3, 0xf1, 0xed, 0x00, 0xd8, 0xd0, 0xc5, 0x00, 0xe0, 0xd9,\n\t0xd0, 0x5a, 0xde, 0xd7, 0xcd, 0xdb, 0xd2, 0xd5, 0xcf, 0xfb, 0xb2, 0xd1,\n\t0xd6, 0xff, 0x87, 0xcc, 0xe1, 0xff, 0x5e, 0xc8, 0xeb, 0xff, 0x3e, 0xc4,\n\t0xf3, 0xff, 0x2a, 0xc2, 0xf8, 0xff, 0x1e, 0xc1, 0xfb, 0xff, 0x17, 0xc0,\n\t0xfc, 0xff, 0x15, 0xc0, 0xfd, 0xff, 0x47, 0xd6, 0xfe, 0xff, 0x5d, 0xde,\n\t0xfe, 0xff, 0x5f, 0xde, 0xfd, 0xff, 0x65, 0xdd, 0xfa, 0xff, 0x6f, 0xdc,\n\t0xf6, 0xff, 0x80, 0xdb, 0xf0, 0xff, 0x98, 0xda, 0xe6, 0xff, 0xb5, 0xd8,\n\t0xdb, 0xff, 0xcd, 0xd7, 0xd2, 0xfe, 0xdc, 0xd6, 0xcd, 0xee, 0xdf, 0xd8,\n\t0xce, 0xa1, 0xe3, 0xdd, 0xd5, 0x0f, 0xe2, 0xdd, 0xd4, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe6, 0xe1, 0xd9, 0x00, 0xe9, 0xe5, 0xde, 0x02, 0xe2, 0xdb,\n\t0xd3, 0x20, 0xe1, 0xd9, 0xcf, 0x5b, 0xe3, 0xd8, 0xcd, 0x99, 0xe2, 0xd7,\n\t0xcc, 0xcd, 0xdd, 0xd6, 0xcd, 0xeb, 0xd2, 0xd5, 0xcf, 0xfa, 0xc4, 0xd3,\n\t0xd2, 0xff, 0xb6, 0xd2, 0xd6, 0xff, 0xa8, 0xd0, 0xd9, 0xff, 0x9e, 0xcf,\n\t0xdc, 0xff, 0xaa, 0xd6, 0xdd, 0xff, 0xb6, 0xd8, 0xdb, 0xff, 0xbe, 0xd8,\n\t0xd8, 0xff, 0xc8, 0xd7, 0xd4, 0xff, 0xd2, 0xd7, 0xd0, 0xfe, 0xda, 0xd6,\n\t0xce, 0xf3, 0xdf, 0xd7, 0xcc, 0xdc, 0xe1, 0xd7, 0xcc, 0xb3, 0xe1, 0xd8,\n\t0xce, 0x77, 0xe1, 0xda, 0xd1, 0x36, 0xe4, 0xdf, 0xd7, 0x0a, 0xdb, 0xd3,\n\t0xc9, 0x00, 0xf2, 0xef, 0xec, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe7, 0xe3,\n\t0xdb, 0x00, 0xeb, 0xe7, 0xe1, 0x02, 0xe3, 0xdd, 0xd5, 0x13, 0xe1, 0xda,\n\t0xd1, 0x2f, 0xe2, 0xd9, 0xcf, 0x50, 0xe2, 0xd9, 0xce, 0x70, 0xe3, 0xd8,\n\t0xcd, 0x8c, 0xe3, 0xd8, 0xcd, 0xa2, 0xe3, 0xd8, 0xcd, 0xaf, 0xe2, 0xd7,\n\t0xcd, 0xb2, 0xe1, 0xd7, 0xcd, 0xa9, 0xe2, 0xd8, 0xcd, 0x97, 0xe1, 0xd8,\n\t0xce, 0x7f, 0xe1, 0xd9, 0xcf, 0x60, 0xe1, 0xda, 0xd1, 0x3e, 0xe2, 0xdc,\n\t0xd3, 0x1e, 0xe6, 0xe1, 0xda, 0x07, 0x46, 0x1f, 0x00, 0x00, 0xf6, 0xf5,\n\t0xf2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xfe, 0xfe, 0xfe, 0x00, 0xff, 0xff, 0xff, 0x00, 0xf4, 0xf2,\n\t0xef, 0x01, 0xec, 0xe7, 0xe2, 0x05, 0xea, 0xe6, 0xe0, 0x06, 0xf0, 0xed,\n\t0xe9, 0x03, 0xfb, 0xfa, 0xfa, 0x00, 0xf9, 0xf7, 0xf6, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe,\n\t0x7f, 0xff, 0xff, 0xf8, 0x0f, 0xff, 0xff, 0xe0, 0x07, 0xff, 0xff, 0xc0,\n\t0x01, 0xff, 0xff, 0x80, 0x00, 0xff, 0xff, 0x00, 0x00, 0x7f, 0xfe, 0x00,\n\t0x00, 0x7f, 0xfc, 0x00, 0x00, 0x3f, 0xfc, 0x00, 0x00, 0x1f, 0xf8, 0x00,\n\t0x00, 0x1f, 0xf8, 0x00, 0x00, 0x1f, 0xf8, 0x00, 0x00, 0x0f, 0xf0, 0x00,\n\t0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00,\n\t0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00, 0x00, 0x07, 0xf0, 0x00,\n\t0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00,\n\t0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf0, 0x00, 0x00, 0x0f, 0xf8, 0x00,\n\t0x00, 0x0f, 0xf8, 0x00, 0x00, 0x1f, 0xff, 0x00, 0x00, 0xff, 0xff, 0xfc,\n\t0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x28, 0x00,\n\t0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x01, 0x00,\n\t0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xe6, 0xe1, 0xda, 0x00, 0xec, 0xe8, 0xe2, 0x04, 0xe3, 0xdd,\n\t0xd5, 0x26, 0xe1, 0xdb, 0xd3, 0x40, 0xe5, 0xdf, 0xd8, 0x18, 0xf7, 0xf6,\n\t0xf4, 0x01, 0xef, 0xec, 0xe8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xf9, 0xf7, 0xf6, 0x00, 0xd4, 0xcb, 0xbe, 0x00, 0xe7, 0xe2,\n\t0xdb, 0x0c, 0xe1, 0xdb, 0xd3, 0x47, 0xdf, 0xd8, 0xcf, 0x9c, 0xde, 0xd7,\n\t0xcd, 0xde, 0xde, 0xd7, 0xcc, 0xf3, 0xde, 0xd7, 0xce, 0xcd, 0xe0, 0xd9,\n\t0xd0, 0x80, 0xe3, 0xdd, 0xd4, 0x2e, 0xec, 0xe8, 0xe2, 0x04, 0xe8, 0xe3,\n\t0xdc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfa, 0xf9, 0xf8, 0x00, 0xe3, 0xdd,\n\t0xd5, 0x00, 0xe8, 0xe3, 0xdc, 0x0a, 0xe1, 0xdb, 0xd2, 0x4e, 0xdf, 0xd8,\n\t0xce, 0xb4, 0xdf, 0xd7, 0xcc, 0xf1, 0xe0, 0xd6, 0xcb, 0xff, 0xd6, 0xd5,\n\t0xce, 0xff, 0xce, 0xd5, 0xd0, 0xff, 0xdc, 0xd6, 0xcc, 0xff, 0xdf, 
0xd6,\n\t0xcb, 0xfe, 0xde, 0xd7, 0xcd, 0xe2, 0xdf, 0xd9, 0xcf, 0x8f, 0xe3, 0xdd,\n\t0xd5, 0x2d, 0xf1, 0xef, 0xeb, 0x02, 0xeb, 0xe7, 0xe0, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xec, 0xe8, 0xe3, 0x00, 0xff, 0xff, 0xff, 0x01, 0xe2, 0xdd,\n\t0xd4, 0x30, 0xdf, 0xd8, 0xcf, 0xa4, 0xde, 0xd7, 0xcc, 0xf2, 0xdf, 0xd6,\n\t0xcb, 0xff, 0xcd, 0xd4, 0xd0, 0xff, 0x94, 0xce, 0xde, 0xff, 0x4f, 0xc6,\n\t0xef, 0xff, 0x59, 0xd3, 0xf6, 0xff, 0x8e, 0xdb, 0xea, 0xff, 0xbb, 0xd8,\n\t0xd9, 0xff, 0xd9, 0xd6, 0xcd, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xde, 0xd7,\n\t0xcd, 0xde, 0xe0, 0xd9, 0xd0, 0x77, 0xe5, 0xe0, 0xd8, 0x15, 0xda, 0xd3,\n\t0xc8, 0x00, 0xf7, 0xf5, 0xf3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe5, 0xdf,\n\t0xd8, 0x00, 0xe8, 0xe3, 0xdc, 0x08, 0xe0, 0xda, 0xd1, 0x62, 0xde, 0xd7,\n\t0xcd, 0xdb, 0xdf, 0xd6, 0xcb, 0xff, 0xd2, 0xd5, 0xcf, 0xff, 0x91, 0xcd,\n\t0xdf, 0xff, 0x3e, 0xc4, 0xf3, 0xff, 0x14, 0xbf, 0xfd, 0xff, 0x0e, 0xbf,\n\t0xff, 0xff, 0x3e, 0xd3, 0xff, 0xff, 0x58, 0xdf, 0xff, 0xff, 0x60, 0xdd,\n\t0xfc, 0xff, 0x85, 0xdb, 0xed, 0xff, 0xbc, 0xd7, 0xd8, 0xff, 0xdc, 0xd6,\n\t0xcc, 0xff, 0xde, 0xd6, 0xcc, 0xfb, 0xdf, 0xd8, 0xce, 0xba, 0xe2, 0xdc,\n\t0xd4, 0x38, 0xff, 0xff, 0xff, 0x00, 0xeb, 0xe7, 0xe2, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe3, 0xdd, 0xd4, 0x00, 0xe6, 0xe0,\n\t0xd9, 0x12, 0xdf, 0xd9, 0xd0, 0x8b, 0xde, 0xd7, 0xcc, 0xf4, 0xdd, 0xd6,\n\t0xcc, 0xff, 0xae, 0xd1, 0xd7, 0xff, 0x4d, 0xc6, 0xef, 0xff, 0x14, 0xbf,\n\t0xfd, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 
0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x62, 0xdd, 0xfb, 0xff, 0x93, 0xda,\n\t0xe8, 0xff, 0xce, 0xd7, 0xd2, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xde, 0xd7,\n\t0xcd, 0xdd, 0xe1, 0xda, 0xd2, 0x55, 0xf0, 0xed, 0xe9, 0x03, 0xe8, 0xe4,\n\t0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xe1, 0xdb, 0xd1, 0x00, 0xe3, 0xde, 0xd5, 0x14, 0xdf, 0xd8,\n\t0xcf, 0x9e, 0xde, 0xd6, 0xcc, 0xfc, 0xd7, 0xd5, 0xcd, 0xff, 0x86, 0xcc,\n\t0xe1, 0xff, 0x24, 0xc1, 0xf9, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x75, 0xdc, 0xf4, 0xff, 0xbb, 0xd8, 0xd9, 0xff, 0xde, 0xd6,\n\t0xcc, 0xff, 0xde, 0xd7, 0xcd, 0xea, 0xe0, 0xda, 0xd1, 0x66, 0xef, 0xec,\n\t0xe7, 0x04, 0xe8, 0xe4, 0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe3, 0xde,\n\t0xd6, 0x00, 0xe5, 0xe0, 0xd9, 0x11, 0xdf, 0xd8, 0xcf, 0x9d, 0xdf, 0xd6,\n\t0xcc, 0xfc, 0xd1, 0xd5, 0xcf, 0xff, 0x6a, 0xc9, 0xe8, 0xff, 0x15, 0xc0,\n\t0xfd, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x66, 0xdd, 0xf9, 0xff, 0xae, 0xd8,\n\t0xde, 0xff, 0xdd, 0xd6, 0xcc, 0xff, 0xde, 0xd7, 0xcd, 0xee, 0xe1, 0xda,\n\t0xd1, 0x60, 0xff, 0xff, 0xff, 0x01, 0xe9, 0xe5, 0xdf, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe7, 0xe2, 0xdb, 0x00, 0xeb, 0xe6,\n\t0xe1, 0x06, 0xe0, 0xd9, 0xd0, 0x87, 0xde, 0xd6, 0xcc, 0xfb, 0xd2, 0xd5,\n\t0xcf, 0xff, 0x63, 0xc8, 0xea, 0xff, 0x11, 0xbf, 0xfe, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 
0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x62, 0xdd,\n\t0xfb, 0xff, 0xab, 0xd8, 0xdf, 0xff, 0xdd, 0xd6, 0xcc, 0xff, 0xde, 0xd7,\n\t0xcd, 0xe4, 0xe1, 0xdb, 0xd2, 0x47, 0xd1, 0xc8, 0xbb, 0x00, 0xf2, 0xef,\n\t0xec, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xef, 0xec, 0xe8, 0x00, 0xc7, 0xbc, 0xab, 0x00, 0xe1, 0xdb,\n\t0xd2, 0x5b, 0xde, 0xd7, 0xcd, 0xf1, 0xd9, 0xd5, 0xcd, 0xff, 0x6f, 0xca,\n\t0xe7, 0xff, 0x11, 0xbf, 0xfe, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x63, 0xdd, 0xfa, 0xff, 0xb4, 0xd8, 0xdb, 0xff, 0xde, 0xd6,\n\t0xcb, 0xff, 0xde, 0xd7, 0xce, 0xcc, 0xe3, 0xdd, 0xd5, 0x24, 0xe0, 0xda,\n\t0xd1, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe1, 0xdb, 0xd2, 0x00, 0xe3, 0xdd, 0xd5, 0x24, 0xde, 0xd7,\n\t0xce, 0xd1, 0xde, 0xd6, 0xcc, 0xff, 0x8f, 0xcd, 0xdf, 0xff, 0x17, 0xc0,\n\t0xfc, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x6d, 0xdc, 0xf7, 0xff, 0xc6, 0xd7,\n\t0xd5, 0xff, 0xde, 0xd6, 0xcc, 0xff, 0xdf, 0xd9, 0xcf, 0x97, 0xec, 0xe7,\n\t0xe2, 0x07, 0xe7, 0xe2, 0xdb, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xea, 0xe5,\n\t0xdf, 0x00, 0xf7, 0xf5, 0xf3, 0x03, 0xdf, 0xd9, 0xcf, 0x8b, 0xdf, 0xd6,\n\t0xcc, 0xff, 0xba, 0xd2, 0xd5, 0xff, 0x2c, 0xc2, 0xf7, 0xff, 0x0e, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 
0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x85, 0xdb,\n\t0xed, 0xff, 0xd7, 0xd6, 0xce, 0xff, 0xde, 0xd7, 0xcd, 0xee, 0xe1, 0xdb,\n\t0xd2, 0x48, 0xdc, 0xd5, 0xcb, 0x00, 0xfa, 0xf9, 0xf8, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0xe0, 0xda,\n\t0xd1, 0x00, 0xe3, 0xdd, 0xd5, 0x33, 0xde, 0xd7, 0xcd, 0xe4, 0xd9, 0xd6,\n\t0xcd, 0xff, 0x5f, 0xc8, 0xeb, 0xff, 0x0e, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5c, 0xde,\n\t0xfd, 0xff, 0xae, 0xd8, 0xde, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xdf, 0xd8,\n\t0xce, 0xad, 0xe8, 0xe4, 0xdd, 0x0b, 0xe6, 0xe1, 0xda, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xee, 0xea, 0xe5, 0x00, 0xff, 0xff,\n\t0xff, 0x01, 0xdf, 0xd9, 0xcf, 0x88, 0xe0, 0xd6, 0xcb, 0xff, 0xab, 0xd0,\n\t0xd8, 0xff, 0x1d, 0xc1, 0xfb, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x78, 0xdc, 0xf2, 0xff, 0xd4, 0xd6, 0xcf, 0xff, 0xde, 0xd7,\n\t0xcc, 0xf0, 0xe1, 0xdb, 0xd3, 0x44, 0xde, 0xd7, 0xcd, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe3, 0xdd, 0xd5, 0x00, 0xe4, 0xde,\n\t0xd7, 0x1e, 0xde, 0xd7, 0xcd, 0xd5, 0xdb, 0xd6, 0xcc, 0xff, 0x60, 0xc8,\n\t0xea, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 
0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x5c, 0xde, 0xfe, 0xff, 0xb0, 0xd8, 0xdd, 0xff, 0xdf, 0xd6,\n\t0xcb, 0xff, 0xdf, 0xd9, 0xcf, 0x96, 0xfd, 0xfc, 0xfc, 0x02, 0xeb, 0xe7,\n\t0xe1, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0xdb, 0xd4, 0xc9, 0x00, 0xe1, 0xda,\n\t0xd2, 0x59, 0xdf, 0xd7, 0xcc, 0xf9, 0xbe, 0xd3, 0xd3, 0xff, 0x28, 0xc2,\n\t0xf8, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x84, 0xdb, 0xee, 0xff, 0xda, 0xd6,\n\t0xcd, 0xff, 0xde, 0xd7, 0xcd, 0xd6, 0xe4, 0xde, 0xd6, 0x1d, 0xe3, 0xdd,\n\t0xd5, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xed, 0xea, 0xe4, 0x00, 0xff, 0xff, 0xff, 0x02, 0xdf, 0xd9,\n\t0xcf, 0x99, 0xe0, 0xd6, 0xcb, 0xff, 0x8c, 0xcd, 0xe0, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x67, 0xdd, 0xf9, 0xff, 0xc8, 0xd7,\n\t0xd4, 0xff, 0xdf, 0xd6, 0xcc, 0xf7, 0xe1, 0xda, 0xd2, 0x4f, 0xdd, 0xd6,\n\t0xcc, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe5, 0xe0, 0xd8, 0x00, 0xe5, 0xe0, 0xd8, 0x14, 0xde, 0xd7,\n\t0xce, 0xcc, 0xdb, 0xd6, 0xcd, 0xff, 0x58, 0xc7, 0xec, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 
0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5a, 0xde, 0xfe, 0xff, 0xac, 0xd8,\n\t0xde, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xdf, 0xd9, 0xcf, 0x87, 0xb5, 0xa7,\n\t0x91, 0x00, 0xf3, 0xf0, 0xec, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe0, 0xda, 0xd1, 0x00, 0xe2, 0xdc, 0xd4, 0x33, 0xdf, 0xd7,\n\t0xcd, 0xeb, 0xcb, 0xd4, 0xd0, 0xff, 0x33, 0xc3, 0xf5, 0xff, 0x0e, 0xbf,\n\t0xff, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x90, 0xda,\n\t0xe9, 0xff, 0xdd, 0xd6, 0xcc, 0xff, 0xde, 0xd8, 0xce, 0xb6, 0xea, 0xe6,\n\t0xe0, 0x09, 0xe9, 0xe4, 0xde, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xdd, 0xd6, 0xcc, 0x00, 0xe1, 0xda, 0xd1, 0x55, 0xe0, 0xd7,\n\t0xcc, 0xfa, 0xb3, 0xd1, 0xd6, 0xff, 0x1d, 0xc1, 0xfb, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x7a, 0xdc,\n\t0xf2, 0xff, 0xd7, 0xd6, 0xce, 0xff, 0xde, 0xd7, 0xcd, 0xd7, 0xe4, 0xdf,\n\t0xd7, 0x1b, 0xe4, 0xde, 0xd6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf7, 0xf5,\n\t0xf3, 0x00, 0xd6, 0xcd, 0xc1, 0x00, 0xe0, 0xd9, 0xd0, 0x75, 0xe0, 0xd6,\n\t0xcb, 0xff, 0x99, 0xce, 0xdd, 0xff, 0x13, 0xbf, 0xfd, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 
0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x6b, 0xdd,\n\t0xf8, 0xff, 0xce, 0xd7, 0xd1, 0xff, 0xdf, 0xd7, 0xcc, 0xeb, 0xe2, 0xdc,\n\t0xd4, 0x32, 0xe1, 0xdb, 0xd2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf2, 0xf0,\n\t0xec, 0x00, 0x7d, 0x64, 0x3d, 0x00, 0xdf, 0xd8, 0xcf, 0x90, 0xe0, 0xd6,\n\t0xcb, 0xff, 0x82, 0xcc, 0xe2, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x63, 0xdd,\n\t0xfb, 0xff, 0xc4, 0xd7, 0xd5, 0xff, 0xdf, 0xd6, 0xcc, 0xf6, 0xe1, 0xdb,\n\t0xd2, 0x46, 0xde, 0xd8, 0xce, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0xeb,\n\t0xe7, 0x00, 0xf8, 0xf6, 0xf4, 0x03, 0xdf, 0xd8, 0xcf, 0xa6, 0xdf, 0xd6,\n\t0xcb, 0xff, 0x70, 0xca, 0xe6, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5e, 0xdd,\n\t0xfd, 0xff, 0xbb, 0xd8, 0xd9, 0xff, 0xdf, 0xd6, 0xcc, 0xfc, 0xe0, 0xda,\n\t0xd1, 0x5b, 0xdc, 0xd5, 0xca, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xeb, 0xe7,\n\t0xe1, 0x00, 0xed, 0xe9, 0xe3, 0x07, 0xdf, 0xd8, 0xce, 0xb1, 0xde, 0xd6,\n\t0xcc, 0xff, 0x64, 0xc8, 0xea, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 
0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5b, 0xde,\n\t0xfe, 0xff, 0xb4, 0xd8, 0xdb, 0xff, 0xdf, 0xd6, 0xcb, 0xfe, 0xe0, 0xd9,\n\t0xd0, 0x65, 0xda, 0xd3, 0xc8, 0x00, 0xfd, 0xfd, 0xfc, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xea, 0xe6,\n\t0xe0, 0x00, 0xeb, 0xe7, 0xe1, 0x0b, 0xdf, 0xd8, 0xce, 0xbd, 0xdc, 0xd6,\n\t0xcc, 0xff, 0x5d, 0xc8, 0xeb, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5a, 0xde,\n\t0xfe, 0xff, 0xaf, 0xd8, 0xdd, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xe1, 0xda,\n\t0xd1, 0x74, 0xd5, 0xcd, 0xc1, 0x00, 0xfb, 0xfa, 0xf9, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe9, 0xe5,\n\t0xde, 0x00, 0xe9, 0xe5, 0xdf, 0x0b, 0xdf, 0xd8, 0xce, 0xbe, 0xdc, 0xd6,\n\t0xcc, 0xff, 0x5a, 0xc7, 0xec, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0xac, 0xd8, 0xde, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xe1, 0xdb,\n\t0xd2, 0x7a, 0xd3, 0xca, 0xbd, 0x00, 0xfb, 0xfa, 0xf9, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xeb, 0xe6,\n\t0xe0, 0x00, 0xeb, 0xe7, 0xe1, 0x0b, 0xdf, 0xd8, 0xce, 0xbe, 0xdc, 0xd6,\n\t0xcc, 0xff, 0x5c, 0xc8, 0xeb, 0xff, 0x0d, 0xbf, 
0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5a, 0xde,\n\t0xfe, 0xff, 0xae, 0xd8, 0xdd, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xe1, 0xda,\n\t0xd2, 0x74, 0xd5, 0xcd, 0xc0, 0x00, 0xfb, 0xfa, 0xf9, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xeb, 0xe7,\n\t0xe1, 0x00, 0xec, 0xe9, 0xe3, 0x07, 0xdf, 0xd8, 0xce, 0xb2, 0xdd, 0xd6,\n\t0xcc, 0xff, 0x63, 0xc8, 0xea, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5b, 0xde,\n\t0xfe, 0xff, 0xb4, 0xd8, 0xdb, 0xff, 0xdf, 0xd6, 0xcb, 0xfe, 0xe0, 0xd9,\n\t0xd0, 0x66, 0xda, 0xd2, 0xc8, 0x00, 0xfd, 0xfc, 0xfc, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xee, 0xea,\n\t0xe5, 0x00, 0xf5, 0xf2, 0xef, 0x04, 0xdf, 0xd8, 0xcf, 0xa8, 0xdf, 0xd6,\n\t0xcc, 0xff, 0x6d, 0xca, 0xe7, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x5d, 0xde,\n\t0xfd, 0xff, 0xb9, 0xd8, 0xd9, 0xff, 0xdf, 0xd6, 0xcc, 0xfc, 0xe0, 0xda,\n\t0xd1, 0x5d, 0xdb, 0xd4, 0xca, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf1, 0xef,\n\t0xeb, 0x00, 0xff, 0xff, 0xff, 0x00, 0xdf, 0xd8, 0xcf, 0x96, 0xe0, 
0xd6,\n\t0xcb, 0xff, 0x7c, 0xcb, 0xe4, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x61, 0xdd,\n\t0xfc, 0xff, 0xc1, 0xd7, 0xd6, 0xff, 0xdf, 0xd6, 0xcc, 0xf8, 0xe1, 0xdb,\n\t0xd2, 0x4c, 0xde, 0xd7, 0xcd, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf4, 0xf2,\n\t0xef, 0x00, 0xce, 0xc4, 0xb5, 0x00, 0xdf, 0xd9, 0xd0, 0x82, 0xe0, 0xd6,\n\t0xcb, 0xff, 0x8e, 0xcd, 0xdf, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x67, 0xdd,\n\t0xf9, 0xff, 0xca, 0xd7, 0xd3, 0xff, 0xdf, 0xd6, 0xcc, 0xf0, 0xe2, 0xdc,\n\t0xd3, 0x3a, 0xe0, 0xd9, 0xd0, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfe, 0xfd,\n\t0xfd, 0x00, 0xd9, 0xd1, 0xc6, 0x00, 0xe0, 0xda, 0xd1, 0x69, 0xe0, 0xd7,\n\t0xcb, 0xff, 0xa3, 0xcf, 0xda, 0xff, 0x16, 0xc0, 0xfd, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x70, 0xdc,\n\t0xf6, 0xff, 0xd2, 0xd6, 0xd0, 0xff, 0xde, 0xd7, 0xcd, 0xe4, 0xe3, 0xdd,\n\t0xd5, 0x27, 0xe2, 0xdc, 0xd3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xde, 
0xd7, 0xcd, 0x00, 0xe1, 0xdb, 0xd2, 0x4c, 0xe0, 0xd7,\n\t0xcc, 0xf8, 0xb7, 0xd2, 0xd5, 0xff, 0x1f, 0xc1, 0xfa, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x7c, 0xdb,\n\t0xf1, 0xff, 0xd9, 0xd6, 0xcd, 0xff, 0xde, 0xd7, 0xcd, 0xd1, 0xe5, 0xe0,\n\t0xd8, 0x16, 0xe5, 0xe0, 0xd8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe1, 0xda, 0xd2, 0x00, 0xe2, 0xdc, 0xd4, 0x32, 0xdf, 0xd7,\n\t0xcd, 0xeb, 0xc9, 0xd4, 0xd1, 0xff, 0x2f, 0xc2, 0xf7, 0xff, 0x0b, 0xbe,\n\t0xff, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x55, 0xde, 0xff, 0xff, 0x8d, 0xda,\n\t0xea, 0xff, 0xdd, 0xd6, 0xcc, 0xff, 0xdf, 0xd8, 0xce, 0xb6, 0xeb, 0xe7,\n\t0xe1, 0x09, 0xea, 0xe5, 0xdf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe4, 0xde, 0xd6, 0x00, 0xe4, 0xdf, 0xd7, 0x19, 0xde, 0xd7,\n\t0xcd, 0xd5, 0xd9, 0xd6, 0xcd, 0xff, 0x8b, 0xcd, 0xe0, 0xff, 0x41, 0xc4,\n\t0xf2, 0xff, 0x1d, 0xc0, 0xfb, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x0e, 0xbf, 0xff, 0xff, 0x0f, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf,\n\t0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x10, 0xbf, 0xfe, 0xff, 0x11, 0xbf,\n\t0xfe, 0xff, 0x3f, 0xd3, 0xff, 0xff, 0x5a, 0xdf, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde,\n\t0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x59, 0xde, 0xff, 0xff, 0x58, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x5a, 0xde,\n\t0xfe, 0xff, 0x66, 0xdd, 0xfa, 0xff, 0x82, 0xdb, 0xef, 0xff, 0xbc, 0xd8,\n\t0xd8, 0xff, 0xde, 0xd6, 0xcc, 0xff, 0xdf, 0xd8, 0xcf, 0x93, 0x00, 0x00,\n\t0x00, 0x00, 0xf1, 0xee, 0xea, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xe9, 0xe5, 0xde, 0x00, 0xeb, 0xe7, 0xe1, 0x08, 0xdf, 0xd8,\n\t0xcf, 0xa6, 0xdd, 0xd6, 0xcc, 0xfe, 0xde, 0xd6, 0xcc, 0xff, 0xd2, 0xd5,\n\t0xcf, 0xff, 0xb1, 0xd1, 0xd7, 0xff, 0x85, 0xcc, 0xe2, 0xff, 0x56, 0xc7,\n\t0xed, 0xff, 0x34, 0xc3, 0xf5, 0xff, 0x1f, 0xc1, 0xfa, 0xff, 0x13, 0xbf,\n\t0xfd, 0xff, 0x0e, 0xbf, 0xfe, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x0d, 0xbf,\n\t0xff, 0xff, 0x0d, 0xbf, 0xff, 0xff, 0x0e, 0xbf, 0xff, 0xff, 0x0f, 0xbf,\n\t0xfe, 0xff, 0x3e, 0xd3, 0xff, 0xff, 0x59, 0xdf, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x57, 0xde, 0xff, 0xff, 0x57, 0xde,\n\t0xff, 0xff, 0x58, 0xde, 0xff, 0xff, 0x5c, 0xde, 0xfe, 0xff, 0x65, 0xdd,\n\t0xfa, 0xff, 0x75, 0xdc, 0xf4, 0xff, 0x8f, 0xda, 0xea, 0xff, 0xac, 0xd8,\n\t0xde, 0xff, 0xc8, 0xd7, 0xd4, 0xff, 0xda, 0xd6, 0xcd, 0xff, 0xde, 0xd6,\n\t0xcc, 0xff, 0xde, 0xd7, 0xcd, 0xf4, 0xe0, 0xda, 0xd1, 0x60, 0xd9, 0xd1,\n\t0xc6, 0x00, 0xf9, 0xf7, 0xf6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xf2, 0xef, 0xeb, 0x00, 0xbd, 0xb0, 0x9d, 0x00, 0xe4, 0xde,\n\t0xd6, 0x1d, 0xe0, 0xda, 0xd1, 0x66, 0xdf, 0xd8, 0xce, 0xb3, 0xde, 0xd7,\n\t0xcd, 0xe6, 0xe0, 0xd7, 0xcc, 0xfc, 0xe0, 0xd6, 0xcb, 0xff, 0xda, 0xd6,\n\t0xcd, 0xff, 0xcc, 0xd4, 0xd0, 0xff, 0xb5, 0xd1, 0xd6, 0xff, 0x99, 0xce,\n\t0xdd, 0xff, 0x7e, 0xcb, 0xe3, 0xff, 0x66, 0xc9, 0xe9, 0xff, 0x52, 0xc6,\n\t0xee, 0xff, 0x42, 0xc5, 0xf2, 0xff, 0x38, 0xc3, 0xf4, 0xff, 0x30, 0xc3,\n\t0xf6, 0xff, 0x54, 0xd3, 0xf8, 0xff, 0x6e, 0xdd, 0xf7, 0xff, 0x72, 0xdc,\n\t0xf5, 0xff, 0x79, 0xdc, 0xf2, 0xff, 0x84, 0xdb, 0xee, 0xff, 0x92, 0xda,\n\t0xe8, 0xff, 0xa3, 0xd9, 0xe2, 0xff, 0xb7, 0xd8, 0xdb, 0xff, 0xc7, 0xd7,\n\t0xd4, 0xff, 0xd5, 0xd6, 0xcf, 0xff, 0xdd, 0xd6, 0xcc, 0xff, 0xdf, 0xd6,\n\t0xcb, 0xff, 0xdf, 0xd6, 0xcc, 0xf7, 0xde, 0xd7, 0xcd, 0xd6, 0xdf, 0xd8,\n\t0xcf, 0x98, 0xe1, 0xdb, 0xd2, 0x4a, 0xe7, 0xe2, 0xda, 0x0c, 0xe4, 0xde,\n\t0xd6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfa, 0xf9,\n\t0xf7, 0x00, 0xcc, 0xc3, 0xb4, 0x00, 0xe9, 0xe5, 0xde, 0x0a, 0xe2, 0xdd,\n\t0xd4, 0x2d, 0xe0, 0xda, 0xd1, 0x62, 0xdf, 0xd8, 0xcf, 0x9c, 0xde, 0xd7,\n\t0xce, 0xce, 0xdf, 0xd7, 0xcd, 0xea, 0xe0, 0xd7, 0xcc, 0xf9, 0xe0, 0xd6,\n\t0xcb, 0xff, 0xe0, 0xd6, 0xcb, 0xff, 0xde, 0xd6, 0xcc, 0xff, 0xda, 0xd6,\n\t0xcd, 0xff, 0xd4, 0xd5, 0xce, 0xff, 0xcf, 0xd4, 0xcf, 0xff, 0xc9, 0xd4,\n\t0xd1, 0xff, 0xcb, 0xd5, 0xd1, 0xff, 0xd0, 0xd6, 0xd1, 0xff, 0xd4, 0xd6,\n\t0xcf, 0xff, 0xd8, 0xd6, 0xce, 0xff, 0xdb, 0xd6, 0xcd, 0xff, 0xde, 0xd6,\n\t0xcc, 0xff, 0xdf, 0xd6, 0xcb, 0xff, 0xdf, 0xd6, 0xcb, 0xfe, 0xdf, 0xd6,\n\t0xcc, 0xf6, 0xde, 0xd7, 0xcd, 0xe1, 0xde, 0xd8, 0xce, 0xbd, 0xdf, 0xd9,\n\t0xcf, 0x8a, 0xe1, 0xdb, 0xd2, 0x50, 0xe4, 0xde, 0xd6, 0x1d, 0xee, 0xea,\n\t0xe5, 0x03, 0xe8, 0xe3, 0xdd, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0x00, 0xea, 0xe5, 0xdf, 0x00, 0xf0, 0xec, 0xe8, 0x03, 0xe6, 0xe1,\n\t0xd9, 0x15, 0xe2, 0xdc, 0xd4, 0x30, 0xe1, 0xdb, 0xd2, 0x52, 0xe0, 0xd9,\n\t0xd0, 0x78, 0xdf, 0xd8, 0xcf, 0x98, 0xdf, 0xd8, 0xce, 0xb3, 0xde, 0xd7,\n\t0xce, 0xca, 0xdf, 0xd7, 0xcd, 0xda, 0xdf, 0xd7, 0xcd, 0xe2, 0xdf, 0xd7,\n\t0xcd, 0xea, 0xdf, 0xd7, 0xcc, 0xeb, 0xdf, 0xd7, 0xcd, 0xe9, 0xde, 0xd7,\n\t0xcd, 0xe0, 0xde, 0xd7, 0xcd, 0xd5, 0xde, 0xd7, 0xce, 0xc4, 0xdf, 0xd8,\n\t0xce, 0xad, 0xdf, 0xd9, 0xcf, 0x8f, 0xe0, 0xda, 0xd1, 0x6e, 0xe1, 0xdb,\n\t0xd3, 0x48, 0xe3, 0xde, 0xd6, 0x26, 0xe8, 0xe3, 0xdc, 0x0c, 0xfa, 0xfa,\n\t0xf8, 0x01, 0xf3, 0xf1, 0xee, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0xf4, 0xf2,\n\t0xef, 0x00, 0xfa, 0xf9, 0xf8, 0x01, 0xec, 0xe8, 0xe2, 0x07, 0xe6, 0xe1,\n\t0xda, 0x11, 0xe4, 0xdf, 0xd7, 0x1d, 0xe3, 0xdd, 0xd5, 0x25, 0xe3, 0xdd,\n\t0xd5, 0x2f, 0xe2, 0xdc, 0xd4, 0x30, 0xe4, 0xde, 0xd6, 0x2e, 0xe3, 0xdd,\n\t0xd5, 0x23, 0xe5, 0xe0, 0xd8, 0x19, 0xe8, 0xe3, 0xdc, 0x0e, 0xef, 0xeb,\n\t0xe7, 0x06, 0xff, 0xff, 0xff, 0x00, 0xfc, 0xfb, 0xfa, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff,\n\t0xfc, 0x1f, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x07, 0xff, 0xff,\n\t0x00, 0x00, 0xff, 0xff, 0xc0, 0x01, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff,\n\t0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0xff, 0xfe, 0x00, 0x00, 0x7f, 0xff,\n\t0x00, 0x00, 0xff, 0xfc, 0x00, 0x00, 0x1f, 0xff, 0x00, 0x00, 0xff, 0xf8,\n\t0x00, 0x00, 0x0f, 0xff, 0x00, 0x00, 0xff, 0xf0, 0x00, 0x00, 0x07, 0xff,\n\t0x00, 0x00, 0xff, 0xe0, 0x00, 0x00, 0x07, 0xff, 0x00, 0x00, 0xff, 0xe0,\n\t0x00, 0x00, 0x03, 0xff, 0x00, 0x00, 0xff, 0xc0, 0x00, 0x00, 0x01, 0xff,\n\t0x00, 0x00, 0xff, 0x80, 0x00, 0x00, 0x01, 0xff, 0x00, 0x00, 0xff, 0x80,\n\t0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff,\n\t0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x00, 0x00, 0xff, 0x00,\n\t0x00, 0x00, 0x00, 0x7f, 0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x7f,\n\t0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x00, 0x00, 0xfe, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x3f,\n\t0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x3f,\n\t0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfc, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x3f,\n\t0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfc, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x3f,\n\t0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x3f,\n\t0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00,\n\t0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x7f,\n\t0x00, 0x00, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x00, 0x00, 0xff, 0x00,\n\t0x00, 0x00, 0x00, 0x7f, 0x00, 0x00, 0xff, 0xc0, 0x00, 0x00, 0x01, 0xff,\n\t0x00, 0x00, 0xff, 0xf8, 0x00, 0x00, 0x0f, 0xff, 0x00, 0x00, 0xff, 0xff,\n\t0xc0, 0x03, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n\t0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff,\n\t0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n\t0x00, 0x00, 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00,\n\t0x00, 0x0d, 0x49, 0x48, 0x44, 0x52, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n\t0x01, 0x00, 0x08, 0x06, 0x00, 0x00, 0x00, 0x5c, 0x72, 0xa8, 0x66, 0x00,\n\t0x00, 0x2f, 0xa3, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0xed, 0x9d, 0x69,\n\t0x8c, 0x64, 0xd7, 0x75, 0xdf, 0xff, 0xf7, 0xde, 0xb7, 0xd5, 0x5e, 0xd5,\n\t0xdb, 0xcc, 0xf4, 0x2c, 0xe4, 0x0c, 0x29, 0xae, 0x1a, 0x92, 0x92, 0x48,\n\t0x89, 0x5a, 0x28, 0x6a, 0xa1, 0x24, 0x48, 0x76, 0xec, 0xc8, 0x81, 0x15,\n\t0x27, 0xce, 0x86, 0x20, 0xb0, 0x11, 0x21, 0x41, 0xbe, 0x24, 0x8e, 0x61,\n\t0xe4, 0x73, 0x80, 0x00, 0xf9, 0x14, 0x20, 0x80, 0x8d, 0x58, 0x81, 0xed,\n\t0x58, 0xde, 0xb5, 0x4b, 0xa0, 0x68, 0x51, 0x24, 0x25, 0xd9, 0xb4, 0x44,\n\t0x52, 0xe2, 0xbe, 0x0c, 0xb7, 0xd9, 0x7b, 0x66, 0x7a, 0xaf, 0xae, 0xed,\n\t0x6d, 0xf7, 0x9e, 0x7c, 0x78, 0xaf, 0x66, 0x6a, 0x9a, 0xc3, 0x99, 0x57,\n\t0x3d, 0xdd, 0x5d, 0xaf, 0xba, 0xcf, 0x8f, 0x6c, 0xf4, 0x4c, 0x4f, 0x55,\n\t0xf5, 0x7d, 0xf7, 0xbd, 0xf3, 0xbf, 0xe7, 0x9e, 0x7b, 0xee, 0xb9, 0x00,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 
0x0c,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c,\n\t0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x0c, 0xc3, 0x30, 0x5b,\n\t0x85, 0x18, 0x75, 0x03, 0x98, 0xad, 0x87, 0x88, 0x36, 0xe5, 0x73, 0x84,\n\t0xe0, 0xc7, 0x65, 0xa7, 0xc1, 0x77, 0x74, 0xcc, 0x78, 0x17, 0x63, 0x16,\n\t0x00, 0x6c, 0x00, 0xea, 0x8a, 0xef, 0x01, 0x8a, 0x46, 0x9b, 0x49, 0x43,\n\t0xc6, 0xe9, 0xbf, 0x9d, 0x00, 0x18, 0x63, 0xde, 0xf1, 0x79, 0x42, 0x08,\n\t0x28, 0x29, 0x01, 0x00, 0x52, 0x4a, 0xc8, 0xc4, 0xe8, 0x23, 0x21, 0xc5,\n\t0xb2, 0x00, 0x3a, 0xe9, 0x5b, 0xd7, 0x7d, 0x3c, 0x62, 0x00, 0x7a, 0xfd,\n\t0xbf, 0xb1, 0x60, 0xe4, 0x1f, 0xbe, 0x43, 0x39, 0xe3, 0x0a, 0x06, 0x2e,\n\t0x01, 0x58, 0xe9, 0x77, 0x00, 0x70, 0x0c, 0xd1, 0xa4, 0xd6, 0xc6, 0x33,\n\t0xc6, 0x20, 0xd2, 0x1a, 0x51, 0xa4, 0x8b, 0xb1, 0xd1, 0x37, 0xc7, 0xb1,\n\t0x9e, 0x8a, 0x63, 0x23, 0xb4, 0x31, 0x30, 0xc6, 0xc0, 0x10, 0xc1, 0x18,\n\t0x12, 0x00, 0x66, 0x00, 0xba, 0x5d, 0x1b, 0xaa, 0x10, 0x51, 0x72, 0xd7,\n\t0x09, 0x88, 0xb5, 0xc6, 0xfa, 0x5f, 0x27, 0x65, 0x22, 0x00, 0x42, 0x24,\n\t0x02, 0xa0, 0xa4, 0x04, 0x11, 0xba, 0x04, 0x7a, 0x0d, 0xc0, 0x9c, 0x14,\n\t0x82, 0x92, 0x9f, 0x27, 0xaf, 0xb3, 0x2d, 0x15, 0x59, 0x96, 0x3a, 0x61,\n\t0x29, 0x75, 0xd6, 0xb1, 0x2d, 0xad, 0x94, 0x84, 0x92, 0xd2, 0x28, 0x29,\n\t0xd7, 0x00, 0x34, 0x01, 0x98, 0xfe, 0xa5, 0x01, 0x88, 0x06, 0xfe, 0x0e,\n\t0x80, 0x45, 0x62, 0xd4, 0x70, 0xef, 0x8f, 0x80, 0x75, 0x46, 0x2e, 0x90,\n\t0x8c, 0xdc, 0x2a, 0xfd, 0xb3, 0x67, 0x0c, 0x35, 0xb4, 0x31, 0x4e, 0x18,\n\t0xc5, 0x08, 0xa2, 0xa8, 0x16, 0xc5, 0xfa, 0x3d, 0x61, 0x18, 0x55, 0xc2,\n\t0x58, 0xc3, 0x18, 0x53, 0x27, 0xe0, 0x0e, 0xad, 0xcd, 0x64, 0xac, 0x35,\n\t0x8c, 0x21, 0x00, 0xe4, 0x11, 0x70, 0x90, 0x88, 0x6a, 0x44, 0x10, 0x57,\n\t0x10, 0x11, 0x1b, 0x40, 0x09, 0xef, 0xe2, 0x21, 0x64, 0x40, 0x03, 0xe8,\n\t0x02, 0x08, 0x07, 0x1b, 0x0d, 0x21, 0x20, 0x85, 0x88, 0x85, 0xc0, 0x39,\n\t0x00, 0x0b, 0x42, 0x08, 0x6d, 0x29, 0x25, 0x2c, 0xa5, 0x22, 0x29, 0xc5,\n\t0x71, 0x21, 0xf0, 0x96, 0x14, 0x32, 0xb2, 0x2d, 0x05, 0xdb, 0xb6, 0xba,\n\t0xb6, 0xa5, 0xde, 0xf4, 0x1c, 0x7b, 0xc9, 0xb6, 0x2c, 0x28, 0x25, 0x63,\n\t0x29, 0xc4, 0x4a, 0xfa, 0xb9, 0x26, 0xfd, 0x1d, 0xfa, 0xb2, 0x8e, 0x61,\n\t0x71, 0xd8, 0x72, 0xb8, 0x87, 0xb7, 0x98, 0x01, 0x63, 0x1c, 0x34, 0xf2,\n\t0x12, 0x11, 0xd5, 0x62, 0xad, 0x55, 0x2f, 0x88, 0xdc, 0x20, 0x8c, 0x8e,\n\t0x04, 0x61, 0x34, 0x13, 0xc6, 0x5a, 0x1a, 0x63, 0x66, 0x8c, 0x31, 0x77,\n\t0x46, 0xb1, 0x2e, 0xc7, 0xc6, 0x00, 0x44, 0x15, 0x22, 0x1c, 0x32, 0x44,\n\t0xa5, 0xf4, 0xb3, 0x2c, 0x24, 0xc6, 0x6c, 0x0f, 0xfc, 0x1a, 0x31, 0xf0,\n\t0x35, 0x92, 0xcb, 0x1c, 0xf8, 0xea, 0xff, 0xdd, 0x47, 0x62, 0xdc, 0x24,\n\t0x12, 0xa1, 0xe8, 0x09, 0x81, 0xd3, 0x10, 0xa2, 0x69, 0x49, 0x49, 0xb6,\n\t0x6d, 0x75, 0x95, 0x94, 0xaf, 0x2a, 0x25, 0xcf, 0x79, 0x8e, 0xed, 0xbb,\n\t0x8e, 0x7d, 0xdc, 0x73, 0xec, 0x0b, 0xb6, 0xa5, 0x8c, 0x92, 0xd2, 0x08,\n\t0x21, 0x9a, 0x48, 0xa6, 0x1c, 0x1a, 0xc9, 0x14, 0x83, 0x00, 0x16, 0x85,\n\t0xcd, 0x86, 0x7b, 0x73, 0x13, 0x59, 0x67, 0xec, 0x12, 0x80, 0x43, 0x40,\n\t0x5d, 0xc7, 0xba, 0xd0, 0x0d, 0xc2, 0x7d, 0xbd, 0x20, 0xbc, 0xc1, 0x0f,\n\t0x22, 0x57, 0x1b, 0x7d, 0x20, 0x8e, 0xf5, 0x6d, 0x61, 0x14, 0x7b, 0xda,\n\t0x90, 0x43, 0xa0, 0x83, 0xc6, 0xd0, 0x24, 0x11, 0x49, 0x00, 0x0e, 0x80,\n\t0x32, 0x2e, 0x8d, 0xd6, 0x02, 0x97, 0xdc, 0xff, 0x71, 0xc7, 0xe0, 0x92,\n\t0x48, 0x68, 0x24, 
0x06, 0x1e, 0x48, 0x29, 0x43, 0x29, 0xc4, 0x69, 0x21,\n\t0xb0, 0xa8, 0xa4, 0x24, 0xc7, 0xb6, 0x42, 0x4b, 0xa9, 0xd7, 0x6d, 0x4b,\n\t0x9d, 0x2c, 0x78, 0xce, 0x72, 0xd1, 0x75, 0xdf, 0x72, 0x1d, 0xab, 0x25,\n\t0xa5, 0x0c, 0x01, 0xac, 0x02, 0x08, 0x70, 0xc9, 0x6b, 0x60, 0x51, 0xb8,\n\t0x0e, 0xb8, 0xe7, 0x36, 0xc0, 0x80, 0xa1, 0x0f, 0xba, 0xef, 0x65, 0x22,\n\t0x2a, 0x07, 0x51, 0x64, 0x75, 0x7a, 0xe1, 0xfe, 0x20, 0x0c, 0x0f, 0x84,\n\t0x51, 0x3c, 0x19, 0xc5, 0xfa, 0xce, 0x20, 0x8a, 0xa7, 0x8d, 0x31, 0x7b,\n\t0x8c, 0xa1, 0x03, 0xda, 0x18, 0x1b, 0x40, 0x01, 0x97, 0xbb, 0xe4, 0x12,\n\t0x7c, 0x2f, 0x06, 0xc5, 0xc1, 0x00, 0xe8, 0x08, 0x21, 0xba, 0x4a, 0xca,\n\t0x35, 0x29, 0xc4, 0x09, 0xa5, 0x64, 0xc7, 0xb1, 0xad, 0x15, 0x4b, 0xc9,\n\t0x97, 0x5d, 0xc7, 0x5e, 0x28, 0x15, 0xdc, 0x73, 0x25, 0xcf, 0x3b, 0xa5,\n\t0x94, 0x6c, 0x02, 0x58, 0x4b, 0xdf, 0xc3, 0xa2, 0x30, 0x24, 0xdc, 0x4b,\n\t0x19, 0x58, 0x37, 0xb2, 0x5b, 0x00, 0x2a, 0x04, 0x14, 0xbb, 0x7e, 0xd0,\n\t0xe8, 0xf6, 0x82, 0xc3, 0xbd, 0x20, 0xac, 0x47, 0xb1, 0xbe, 0x39, 0x8c,\n\t0xe2, 0xc3, 0x71, 0xac, 0x5d, 0x43, 0x34, 0xab, 0x8d, 0x99, 0x25, 0x22,\n\t0x0f, 0x40, 0x05, 0xc9, 0xa8, 0x2e, 0xb1, 0x73, 0x46, 0xf2, 0xed, 0x84,\n\t0x70, 0x49, 0x1c, 0x22, 0x00, 0x2d, 0x29, 0x85, 0xaf, 0xa4, 0x3c, 0x6f,\n\t0x29, 0x75, 0xd2, 0x75, 0xec, 0xb7, 0x1d, 0x4b, 0x1d, 0xb3, 0x6d, 0xcb,\n\t0x2f, 0xb8, 0xce, 0x42, 0xd1, 0x73, 0x4f, 0xd8, 0x96, 0x6a, 0x02, 0x68,\n\t0xa5, 0xaf, 0x67, 0x41, 0xb8, 0x0a, 0xdc, 0x2b, 0x57, 0xe0, 0x0a, 0x23,\n\t0xbc, 0x67, 0x0c, 0x4d, 0xb5, 0x7b, 0xfe, 0x91, 0x4e, 0xcf, 0x3f, 0xe0,\n\t0x07, 0xd1, 0xed, 0x7e, 0x18, 0x1d, 0x88, 0xb5, 0x9e, 0xd4, 0xc6, 0x1c,\n\t0x36, 0xda, 0x94, 0x29, 0x71, 0xdb, 0x8b, 0xb8, 0x64, 0xe8, 0x6c, 0xec,\n\t0x5b, 0x4b, 0x7f, 0xb4, 0xef, 0x02, 0x68, 0x49, 0x29, 0xb5, 0x92, 0x62,\n\t0x41, 0x49, 0x79, 0xdc, 0x75, 0xec, 0x13, 0xae, 0x63, 0xbf, 0x52, 0x70,\n\t0x9d, 0x33, 0x95, 0x52, 0xe1, 0x84, 0x6b, 0x5b, 0x2c, 0x08, 0xef, 0x02,\n\t0xf7, 0x42, 0xca, 0xba, 0x51, 0xbe, 0x08, 0xa0, 0xd2, 0xe9, 0xf9, 0x7b,\n\t0x5a, 0x5d, 0xff, 0x50, 0xd7, 0x0f, 0x0f, 0xfb, 0x41, 0x78, 0x8f, 0xd6,\n\t0xe6, 0xf6, 0x58, 0xeb, 0x3d, 0x86, 0xa8, 0x8e, 0xc4, 0x8d, 0xef, 0xcf,\n\t0xf5, 0xb9, 0x1f, 0xf3, 0x41, 0x5f, 0x14, 0x7a, 0x42, 0x60, 0x55, 0x49,\n\t0x39, 0xaf, 0x94, 0x7a, 0xdb, 0xb5, 0xad, 0x53, 0x9e, 0xeb, 0xbc, 0x54,\n\t0x74, 0x9d, 0xb3, 0x65, 0x16, 0x84, 0xcb, 0xd8, 0x9d, 0x57, 0x8d, 0xcb,\n\t0x0c, 0x5e, 0x22, 0x89, 0xa8, 0x97, 0x83, 0x28, 0xae, 0xb5, 0x3a, 0xbd,\n\t0x23, 0xbd, 0x20, 0x7c, 0x6f, 0xd7, 0x0f, 0x6e, 0x0f, 0xa3, 0xf8, 0x50,\n\t0xac, 0xf5, 0x8d, 0xc6, 0x50, 0x1d, 0x40, 0x1d, 0x97, 0x5c, 0x79, 0x66,\n\t0x3c, 0x58, 0x27, 0x08, 0x6a, 0x41, 0x29, 0xf9, 0x96, 0x6b, 0x5b, 0xa7,\n\t0x0a, 0x9e, 0xf3, 0x52, 0xb9, 0xe0, 0x9d, 0x2d, 0x17, 0x0b, 0x27, 0x2c,\n\t0x25, 0xe7, 0x91, 0x78, 0x12, 0x17, 0x93, 0x99, 0x76, 0x8b, 0x20, 0xec,\n\t0x8e, 0xab, 0x4c, 0x59, 0x3f, 0xca, 0xc7, 0xda, 0x4c, 0xb7, 0xbb, 0xfe,\n\t0x0d, 0x5d, 0xdf, 0xdf, 0xdf, 0xf5, 0xc3, 0x3b, 0x83, 0x30, 0x3a, 0x14,\n\t0x69, 0x7d, 0x8b, 0x31, 0xe6, 0x00, 0x11, 0x6a, 0xb8, 0x94, 0x5d, 0xb7,\n\t0xab, 0xfa, 0x69, 0x07, 0x33, 0x20, 0x08, 0x62, 0xc5, 0x52, 0x72, 0xc9,\n\t0xb6, 0xac, 0xd7, 0x3c, 0xd7, 0x7e, 0xbe, 0xe4, 0xb9, 0xaf, 0x97, 0x8b,\n\t0x85, 0x33, 0x45, 0xcf, 0x39, 0x8d, 0x24, 0x81, 0x29, 0xc0, 0x2e, 0x10,\n\t0x83, 0x9d, 0x7b, 0x65, 0x78, 0xc7, 0x5c, 0xde, 0x01, 0x50, 0xf2, 0x83,\n\t0xb0, 0xb1, 0xd6, 0xf5, 0x8f, 0x74, 0x7a, 0xfe, 0x51, 0x3f, 0x88, 0xee,\n\t0x8e, 0xe2, 0xf8, 0xf6, 0x58, 0x9b, 0x29, 0x22, 0x9a, 0x40, 0xe2, 0xd6,\n\t0x0f, 0x66, 0xdd, 0x8d, 0x19, 0xe2, 0x1d, 0x7f, 
0xa6, 0xcb, 0x7e, 0x7e,\n\t0xb5, 0x3d, 0x01, 0xef, 0xf6, 0x6f, 0xe2, 0x5d, 0x7e, 0x4a, 0x19, 0xde,\n\t0x9b, 0x7b, 0x08, 0x49, 0x72, 0x53, 0x53, 0x4a, 0xb9, 0x62, 0x5b, 0xea,\n\t0xb4, 0x63, 0x5b, 0x2f, 0x95, 0x3c, 0xf7, 0x17, 0xe5, 0xa2, 0xf7, 0x5a,\n\t0xa5, 0x54, 0x38, 0x25, 0x93, 0x7c, 0x84, 0x1d, 0x2b, 0x06, 0x3b, 0xeb,\n\t0x6a, 0x52, 0x52, 0xc3, 0xef, 0x1b, 0x7d, 0xad, 0x17, 0x84, 0x07, 0xd7,\n\t0x3a, 0xbd, 0x5b, 0xdb, 0xdd, 0xde, 0xd1, 0x5e, 0x10, 0x1d, 0x8c, 0x62,\n\t0x7d, 0x1b, 0x19, 0xb3, 0x9f, 0xc6, 0xc6, 0xad, 0x17, 0x97, 0x7d, 0xa7,\n\t0x77, 0xfc, 0x1c, 0x10, 0x88, 0x20, 0x29, 0x84, 0x40, 0x04, 0x41, 0xc9,\n\t0x97, 0x44, 0xff, 0xbb, 0x0f, 0x65, 0xd6, 0x60, 0x99, 0x35, 0x48, 0xea,\n\t0x40, 0x40, 0x43, 0x99, 0x16, 0x24, 0xf9, 0x00, 0x00, 0x69, 0x39, 0x90,\n\t0x85, 0x0a, 0x84, 0x10, 0x30, 0x64, 0x41, 0x53, 0x9a, 0x63, 0x24, 0x24,\n\t0x62, 0x94, 0x10, 0xa1, 0x81, 0x10, 0x13, 0x20, 0x28, 0x10, 0x01, 0x04,\n\t0x91, 0xfc, 0x19, 0x16, 0x0c, 0x5c, 0x68, 0x14, 0x40, 0xef, 0xd2, 0x85,\n\t0xe2, 0xb2, 0xdc, 0xa0, 0xdc, 0x13, 0x23, 0xf1, 0x0e, 0x16, 0x2c, 0x25,\n\t0xdf, 0x76, 0x6c, 0xeb, 0xc5, 0x52, 0xc1, 0x7d, 0xae, 0x52, 0x2c, 0xbc,\n\t0x5a, 0x2d, 0x15, 0x4f, 0x49, 0xb9, 0xf3, 0xc4, 0x60, 0xfc, 0xaf, 0x00,\n\t0xef, 0x48, 0xad, 0x55, 0x00, 0x8a, 0x7e, 0x10, 0xcd, 0xac, 0x75, 0x7b,\n\t0xef, 0x6f, 0x75, 0x7a, 0x1f, 0xf4, 0xc3, 0xf0, 0x68, 0x18, 0xc5, 0x37,\n\t0x19, 0x43, 0xd3, 0x00, 0x3c, 0x24, 0xae, 0x7d, 0xce, 0x8c, 0xfe, 0x92,\n\t0x91, 0x0f, 0x8e, 0xda, 0x02, 0x06, 0x92, 0x7c, 0x08, 0x0a, 0x20, 0x11,\n\t0x40, 0x52, 0x00, 0x49, 0x5d, 0xd8, 0x66, 0x19, 0x96, 0x59, 0x86, 0xa5,\n\t0x97, 0x60, 0x9b, 0x25, 0xd8, 0xb4, 0x0c, 0x65, 0xda, 0x90, 0xd4, 0xbb,\n\t0xec, 0x4b, 0x51, 0x17, 0x82, 0x02, 0x08, 0x18, 0x08, 0x18, 0x80, 0x92,\n\t0x15, 0x35, 0xd1, 0x5f, 0x59, 0x23, 0x02, 0x4a, 0x7b, 0x20, 0x26, 0x8f,\n\t0x02, 0x52, 0x01, 0x74, 0xb9, 0x17, 0x41, 0x90, 0xa9, 0xb1, 0x4b, 0x80,\n\t0x08, 0x44, 0x80, 0x21, 0x05, 0x03, 0x07, 0x06, 0x2e, 0x22, 0x54, 0x10,\n\t0xa1, 0x0e, 0x63, 0x04, 0x0c, 0x49, 0x18, 0x38, 0x88, 0x51, 0x81, 0x8f,\n\t0xe9, 0xe4, 0xe7, 0xa4, 0x60, 0xe0, 0x41, 0xa3, 0x08, 0x03, 0x6b, 0xfd,\n\t0x9d, 0x1b, 0x78, 0x00, 0x73, 0x27, 0x10, 0x97, 0x89, 0x81, 0xeb, 0xd8,\n\t0x2f, 0x56, 0x8a, 0xde, 0x73, 0xd5, 0x52, 0xf1, 0xd5, 0x72, 0xd1, 0x3b,\n\t0x85, 0x1d, 0x32, 0x4d, 0x18, 0xcf, 0x56, 0xa7, 0xac, 0x73, 0xf1, 0xdd,\n\t0x58, 0xeb, 0x7d, 0xad, 0x4e, 0xef, 0xa6, 0x66, 0xbb, 0x77, 0x57, 0xc7,\n\t0x0f, 0xee, 0x09, 0xa3, 0xf8, 0x7d, 0xc6, 0x98, 0x43, 0x48, 0xa2, 0xfa,\n\t0xd6, 0x86, 0x7f, 0xd1, 0xa6, 0x92, 0x64, 0xec, 0x5e, 0x6e, 0xe4, 0x11,\n\t0x14, 0xf9, 0x10, 0xa9, 0xc1, 0xda, 0x66, 0x09, 0xb6, 0xbe, 0x00, 0x47,\n\t0x9f, 0x87, 0x63, 0x2e, 0xc0, 0x36, 0x4b, 0x50, 0xa6, 0x09, 0x65, 0x9a,\n\t0xb0, 0xcc, 0x1a, 0x14, 0x75, 0x20, 0x10, 0x27, 0x5f, 0xa4, 0x21, 0x10,\n\t0x23, 0x99, 0xde, 0x5e, 0x59, 0x44, 0xae, 0x7e, 0x9b, 0x09, 0x28, 0xee,\n\t0x85, 0x98, 0xba, 0x1b, 0x10, 0xc3, 0x6c, 0x15, 0x18, 0x1c, 0xd9, 0x07,\n\t0xb6, 0x18, 0x0e, 0x7a, 0x07, 0x24, 0xa0, 0x8d, 0x8d, 0x10, 0x0d, 0x84,\n\t0x98, 0x82, 0x26, 0x07, 0xda, 0x28, 0x44, 0x54, 0x43, 0x4f, 0xec, 0x47,\n\t0x44, 0x15, 0x10, 0x24, 0x34, 0x3c, 0x68, 0x94, 0x2e, 0xca, 0x41, 0x0e,\n\t0xa7, 0x17, 0x17, 0xc5, 0xc0, 0xb6, 0xd4, 0xdb, 0x05, 0xd7, 0x79, 0xb1,\n\t0x52, 0x2c, 0x3c, 0x53, 0x29, 0x79, 0x3f, 0x2f, 0x7a, 0xee, 0x39, 0x24,\n\t0x01, 0xc4, 0xf8, 0xe2, 0x1d, 0x1e, 0x23, 0x31, 0x18, 0x9f, 0x96, 0xa6,\n\t0xac, 0x9f, 0xd7, 0x13, 0x51, 0xb5, 0xdd, 0xf5, 0x0f, 0x34, 0xdb, 0xdd,\n\t0x3b, 0x3a, 0xbd, 0xe0, 0x41, 0x3f, 0x8c, 0xde, 0xa7, 0x8d, 0x3e, 
0x40,\n\t0x84, 0x3a, 0x00, 0x77, 0xb4, 0xd7, 0xd8, 0x37, 0xf6, 0xa4, 0x09, 0x12,\n\t0x21, 0xa4, 0xe9, 0x42, 0x51, 0x07, 0xb6, 0x59, 0x84, 0xab, 0x4f, 0xc3,\n\t0xd5, 0x67, 0xe0, 0xe8, 0xf3, 0xb0, 0xcc, 0x22, 0x6c, 0xbd, 0x08, 0xdb,\n\t0x2c, 0x41, 0x52, 0x37, 0x75, 0xe9, 0xa3, 0xd4, 0xb8, 0xd3, 0x6b, 0x7f,\n\t0x47, 0xba, 0xff, 0x66, 0x5c, 0xda, 0x46, 0x05, 0x60, 0x88, 0xcf, 0xbf,\n\t0xf8, 0x9d, 0xd2, 0xff, 0xad, 0xc4, 0x83, 0x30, 0x02, 0x9a, 0x6c, 0x84,\n\t0x98, 0x84, 0x4f, 0x33, 0x88, 0x74, 0x01, 0x3d, 0xcc, 0xc2, 0xc7, 0x5e,\n\t0x18, 0x58, 0xd0, 0x28, 0x40, 0xc3, 0x1b, 0xb8, 0xda, 0x5c, 0x4c, 0x27,\n\t0x34, 0x80, 0xae, 0x94, 0xe2, 0xac, 0x6d, 0x59, 0xbf, 0x28, 0xba, 0xce,\n\t0xf3, 0xb5, 0x4a, 0xe9, 0xf9, 0x6a, 0xa9, 0xf0, 0x96, 0x6d, 0xa9, 0x39,\n\t0x00, 0x3d, 0x8c, 0x91, 0x57, 0x90, 0xff, 0x16, 0xa6, 0xac, 0x33, 0x7c,\n\t0x2f, 0x8c, 0xe3, 0xd9, 0xd5, 0xb5, 0xce, 0xbd, 0xcd, 0x4e, 0xef, 0xfd,\n\t0x7e, 0x10, 0xde, 0x15, 0xc5, 0xfa, 0x26, 0x22, 0xda, 0x87, 0x24, 0xc5,\n\t0x76, 0x04, 0xd7, 0xb5, 0xde, 0xd8, 0x03, 0x28, 0xd3, 0x81, 0x45, 0x2b,\n\t0xf0, 0xe2, 0x13, 0x70, 0xe3, 0xe3, 0xf0, 0xf4, 0x69, 0x38, 0x7a, 0x0e,\n\t0x8e, 0x3e, 0x07, 0x8b, 0x9a, 0x90, 0x14, 0xa4, 0xee, 0xb9, 0x1e, 0x78,\n\t0x6f, 0x96, 0x51, 0x7b, 0x53, 0x7b, 0x76, 0x8b, 0x05, 0x20, 0xc3, 0xef,\n\t0xbf, 0x28, 0x0e, 0x22, 0x89, 0x29, 0x90, 0x8d, 0xd8, 0x38, 0xf0, 0x69,\n\t0x1f, 0x7c, 0x9a, 0x44, 0x6c, 0x12, 0x61, 0x08, 0x30, 0x75, 0x51, 0x18,\n\t0x0c, 0x9c, 0xb4, 0x97, 0x46, 0x26, 0x0a, 0x17, 0x03, 0x88, 0x96, 0x92,\n\t0x67, 0x3d, 0xc7, 0x79, 0xa1, 0x54, 0xf4, 0x9e, 0x68, 0x54, 0x4a, 0x2f,\n\t0x95, 0x0a, 0xee, 0x49, 0x00, 0x2b, 0x48, 0xbd, 0x82, 0x3c, 0x0b, 0x41,\n\t0x7e, 0x5b, 0x36, 0x00, 0xa5, 0x73, 0x4f, 0x21, 0x60, 0xf5, 0x82, 0xf0,\n\t0xe0, 0xca, 0x5a, 0xe7, 0xfe, 0xb5, 0x4e, 0xef, 0x21, 0x3f, 0x08, 0xef,\n\t0xd7, 0xc6, 0x1c, 0xc0, 0xb6, 0xbb, 0xf8, 0xfd, 0x60, 0x5c, 0x5a, 0x38,\n\t0x03, 0x3e, 0xa4, 0xe9, 0xc2, 0x36, 0x2b, 0x70, 0xf5, 0x49, 0x78, 0xf1,\n\t0x09, 0x78, 0xfa, 0x24, 0xdc, 0xf8, 0x04, 0x5c, 0x7d, 0x16, 0x8a, 0x3a,\n\t0xe9, 0xa8, 0x9e, 0xb8, 0xe9, 0xb4, 0xe9, 0xa3, 0xf8, 0x75, 0xf5, 0xee,\n\t0x88, 0x05, 0xe0, 0x5d, 0xda, 0x74, 0xf1, 0x3b, 0x81, 0x48, 0x26, 0x71,\n\t0x04, 0x63, 0x21, 0x26, 0x0f, 0x3d, 0x9a, 0x85, 0x6f, 0x26, 0x11, 0xd0,\n\t0x24, 0xba, 0x38, 0x84, 0x18, 0x05, 0x68, 0x14, 0x41, 0x90, 0xa3, 0x12,\n\t0x04, 0x42, 0x32, 0x45, 0x38, 0xef, 0xda, 0xd6, 0x5b, 0xa5, 0x82, 0xf7,\n\t0x64, 0xbd, 0x52, 0xfa, 0x61, 0xb5, 0x5c, 0x78, 0x4e, 0x0a, 0xd1, 0x02,\n\t0xf2, 0x2b, 0x02, 0xf9, 0x6c, 0xd5, 0x3a, 0xfa, 0xa3, 0xbf, 0x31, 0xe6,\n\t0xe0, 0xf1, 0xb9, 0x85, 0xdf, 0x5e, 0x6b, 0x77, 0xbf, 0x68, 0x88, 0x6e,\n\t0x40, 0x62, 0xf8, 0xdb, 0x74, 0x0d, 0x89, 0xe1, 0x0a, 0x50, 0x1a, 0x5c,\n\t0x6b, 0xc2, 0x8b, 0x4f, 0xa0, 0x10, 0xbd, 0x81, 0x82, 0x7e, 0x13, 0x5e,\n\t0x7c, 0x02, 0x8e, 0x3e, 0x0f, 0x45, 0xed, 0x77, 0x31, 0xf6, 0xbc, 0x76,\n\t0x75, 0x1e, 0x05, 0xe0, 0x5d, 0xda, 0x39, 0xf8, 0x67, 0xa2, 0x24, 0xe0,\n\t0x48, 0x05, 0x04, 0xba, 0x86, 0x36, 0x1d, 0x86, 0x6f, 0x26, 0xd0, 0xc1,\n\t0x61, 0xc4, 0x28, 0xa5, 0x2b, 0x13, 0x56, 0x7a, 0x1f, 0xd6, 0xbf, 0x7f,\n\t0x4b, 0x89, 0x85, 0xc0, 0x8a, 0x6d, 0x59, 0x4f, 0xcc, 0x4e, 0x4f, 0xfc,\n\t0x8f, 0xc9, 0x5a, 0xf9, 0x59, 0xa4, 0x45, 0x50, 0xf2, 0x28, 0x02, 0x39,\n\t0x09, 0x8c, 0x65, 0x42, 0xf8, 0x61, 0x3c, 0xdd, 0x0b, 0xc2, 0x8f, 0x18,\n\t0xa2, 0xdb, 0xb0, 0xe5, 0x51, 0xfc, 0x4b, 0x2e, 0x7d, 0xe2, 0xce, 0xaf,\n\t0xa1, 0x10, 0x1f, 0x47, 0x29, 0x7a, 0x01, 0xc5, 0xe8, 0x65, 0x78, 0xf1,\n\t0x5b, 0xb0, 0xcd, 0x32, 0x14, 0xb5, 0xd7, 0xb9, 0xf0, 0x40, 0x3f, 0x10,\n\t0xc6, 0x6c, 0x26, 
0xeb, 0xbc, 0x26, 0x01, 0x48, 0x68, 0x38, 0xa2, 0x05,\n\t0x47, 0xb6, 0x50, 0xc1, 0x29, 0x10, 0x59, 0x88, 0xa8, 0x84, 0x50, 0x97,\n\t0xd1, 0xa6, 0x23, 0xf0, 0xcd, 0x04, 0xba, 0x38, 0x84, 0x10, 0x75, 0x68,\n\t0x78, 0xa9, 0x20, 0x0c, 0x96, 0x2d, 0xd8, 0x12, 0x2c, 0x22, 0x4c, 0x47,\n\t0xb1, 0xbe, 0xb7, 0xe7, 0x07, 0x47, 0x51, 0x2b, 0xbf, 0x82, 0x24, 0x2e,\n\t0x90, 0x4b, 0x72, 0x2f, 0x00, 0x83, 0x4b, 0x7c, 0x41, 0x18, 0xd9, 0x71,\n\t0xac, 0xb7, 0x30, 0xb0, 0x27, 0xd3, 0x51, 0x3e, 0x82, 0x32, 0x5d, 0xb8,\n\t0xfa, 0x34, 0x0a, 0xf1, 0x6b, 0x28, 0x45, 0x2f, 0xa3, 0x14, 0xbd, 0x0c,\n\t0xc7, 0x9c, 0x87, 0x32, 0x6b, 0x10, 0xd0, 0x03, 0xf3, 0x75, 0x36, 0xf6,\n\t0xd1, 0xd2, 0x7f, 0x14, 0x14, 0x84, 0xa0, 0x8b, 0x82, 0x50, 0xc6, 0x1c,\n\t0x88, 0x04, 0x62, 0x2a, 0x23, 0xd4, 0x45, 0xb4, 0xe9, 0x46, 0x74, 0xcd,\n\t0x7e, 0x74, 0x70, 0x23, 0x22, 0x54, 0xa1, 0xd3, 0xf8, 0xf0, 0x56, 0x09,\n\t0x02, 0x11, 0x4d, 0xfa, 0x61, 0x74, 0x13, 0x00, 0xd7, 0x10, 0xf5, 0x64,\n\t0x0e, 0x47, 0x7f, 0x60, 0x0c, 0x04, 0x60, 0x00, 0xdb, 0x0f, 0xc3, 0x83,\n\t0x04, 0x9a, 0xc0, 0x26, 0x0a, 0x40, 0x32, 0x6f, 0x04, 0x24, 0xf5, 0xe0,\n\t0xe8, 0x73, 0x28, 0xc4, 0xaf, 0xa3, 0x18, 0xbd, 0x8c, 0x52, 0xf4, 0x22,\n\t0x5c, 0x7d, 0x06, 0x96, 0x59, 0x81, 0x44, 0x84, 0xc1, 0x20, 0x1d, 0x1b,\n\t0x7c, 0x9e, 0xb9, 0xe4, 0x85, 0x09, 0x01, 0xd8, 0xa2, 0x0d, 0x5b, 0xb6,\n\t0x51, 0xc2, 0x05, 0x10, 0x29, 0x84, 0xa6, 0x82, 0x9e, 0x99, 0x44, 0x8b,\n\t0x6e, 0x46, 0xcf, 0xec, 0x45, 0x17, 0x07, 0x60, 0xe0, 0xc1, 0xc0, 0xde,\n\t0x6c, 0x31, 0xb0, 0xc2, 0x38, 0x6e, 0x18, 0x43, 0x85, 0x6f, 0x3e, 0xf1,\n\t0xcc, 0xea, 0xaf, 0x7d, 0xea, 0xbe, 0x51, 0x77, 0xcc, 0x95, 0x1b, 0x39,\n\t0xea, 0x06, 0x0c, 0x81, 0xe3, 0x87, 0xd1, 0x4d, 0x44, 0x34, 0x7d, 0xbd,\n\t0x1f, 0xd4, 0x0f, 0x16, 0x49, 0xea, 0xa4, 0x6e, 0xfd, 0x8b, 0x28, 0x87,\n\t0xcf, 0xa0, 0x18, 0x1f, 0x83, 0x6d, 0x16, 0xd2, 0x0c, 0x39, 0x36, 0xf8,\n\t0x9d, 0xc1, 0xa0, 0x20, 0x10, 0x5c, 0xd5, 0x84, 0xab, 0x56, 0x51, 0xa7,\n\t0xb7, 0xa0, 0xc9, 0x43, 0x60, 0xaa, 0xe8, 0x98, 0x03, 0x68, 0x9b, 0x1b,\n\t0xd0, 0xc5, 0x7e, 0x04, 0x98, 0x81, 0x81, 0xb3, 0x19, 0x62, 0xe0, 0x18,\n\t0x6d, 0x6e, 0xed, 0xfa, 0xc1, 0xec, 0xaf, 0x7d, 0xea, 0xbe, 0xf3, 0xd7,\n\t0xfb, 0x61, 0x5b, 0xc5, 0x58, 0x08, 0xc0, 0x99, 0xf9, 0x65, 0x1c, 0x98,\n\t0x99, 0xf0, 0xc2, 0x28, 0x6e, 0x10, 0xa5, 0xeb, 0x3f, 0x43, 0x93, 0x46,\n\t0xec, 0xa9, 0x0b, 0x2f, 0x3e, 0x81, 0x72, 0xf4, 0x2c, 0xaa, 0xc1, 0x93,\n\t0x28, 0xc4, 0xc7, 0x60, 0x9b, 0xa5, 0xcb, 0xdc, 0x7a, 0x36, 0xf8, 0x9d,\n\t0x4c, 0x3a, 0x75, 0x13, 0x80, 0x12, 0x01, 0x8a, 0x72, 0x1e, 0x45, 0xcc,\n\t0x63, 0x8a, 0x9e, 0x45, 0x64, 0x2a, 0x68, 0xeb, 0x59, 0x34, 0xe9, 0x76,\n\t0xb4, 0xcd, 0x61, 0x44, 0xa2, 0x71, 0x3d, 0x9e, 0x81, 0x15, 0x6b, 0x33,\n\t0xe5, 0x87, 0x51, 0xad, 0x5c, 0xf4, 0x04, 0x00, 0x22, 0xa2, 0xdc, 0x05,\n\t0x02, 0xc7, 0x42, 0x00, 0x0e, 0xcc, 0x4c, 0x20, 0x08, 0xa3, 0x3d, 0x5a,\n\t0x9b, 0xdb, 0x80, 0x81, 0xcc, 0x90, 0x6b, 0x92, 0x06, 0xf1, 0xc8, 0x87,\n\t0xa7, 0x4f, 0xa2, 0x1c, 0x3e, 0x8b, 0x6a, 0xf8, 0x24, 0x8a, 0xd1, 0x6b,\n\t0xb0, 0xcd, 0x42, 0x6a, 0xf4, 0x12, 0x6c, 0xf4, 0xbb, 0x95, 0x4b, 0x79,\n\t0x17, 0x42, 0x00, 0x8e, 0x6a, 0x61, 0x42, 0xbd, 0x86, 0x06, 0x1d, 0x43,\n\t0x60, 0x1a, 0xe8, 0xe8, 0x7d, 0x58, 0xa3, 0x5b, 0xd1, 0xa1, 0x83, 0x08,\n\t0x31, 0x0d, 0x33, 0x64, 0x10, 0xd1, 0x10, 0xd5, 0xc2, 0x28, 0x3a, 0x80,\n\t0x64, 0xbf, 0x89, 0x3f, 0xea, 0xab, 0xbd, 0x12, 0x63, 0x21, 0x00, 0x00,\n\t0x84, 0x1f, 0x46, 0xe5, 0x28, 0xd6, 0xd3, 0xd9, 0xdb, 0x6c, 0xe0, 0xe8,\n\t0x0b, 0x28, 0x87, 0xcf, 0xa2, 0x1e, 0x3e, 0x81, 0x62, 0xf8, 0x12, 0x1c,\n\t0x33, 0x0f, 0x81, 0x38, 0x1d, 0xe9, 0x25, 0x1b, 
0x3d, 0xb3, 0x8e, 0x44,\n\t0x10, 0x84, 0x00, 0x3c, 0xb5, 0x02, 0x4f, 0x2d, 0x63, 0x82, 0x5e, 0x41,\n\t0x68, 0x6a, 0x68, 0xe9, 0x43, 0x58, 0x35, 0xef, 0x45, 0x1b, 0x87, 0xa1,\n\t0x51, 0xca, 0xf6, 0x71, 0x44, 0x93, 0x41, 0x18, 0xdf, 0x82, 0x64, 0x97,\n\t0x29, 0x0b, 0xc0, 0xb0, 0x0c, 0x16, 0xed, 0xf0, 0xc3, 0xa8, 0x68, 0xc8,\n\t0xb8, 0x59, 0xdf, 0x3b, 0xdd, 0xfd, 0x1a, 0xa6, 0x7b, 0x7f, 0x09, 0x37,\n\t0x3e, 0x0d, 0x89, 0x80, 0x8d, 0x9e, 0x19, 0x92, 0x4b, 0x62, 0x90, 0xc4,\n\t0x0d, 0x5e, 0xc0, 0x04, 0xbd, 0x82, 0x4e, 0x3c, 0x8b, 0xd3, 0xfa, 0x57,\n\t0xd0, 0xc3, 0x01, 0xac, 0x3b, 0xe3, 0xe4, 0x1d, 0x10, 0xa0, 0x82, 0x28,\n\t0x2a, 0x20, 0x79, 0x7e, 0xe1, 0x39, 0x76, 0x96, 0x5f, 0xbc, 0xad, 0xe4,\n\t0x6c, 0x47, 0xdc, 0xbb, 0xe2, 0x86, 0x51, 0x7c, 0x0b, 0x11, 0x66, 0x90,\n\t0x61, 0x05, 0x40, 0x50, 0x88, 0x5a, 0xf0, 0x04, 0x0a, 0xd1, 0xeb, 0xe9,\n\t0x88, 0xdf, 0xaf, 0xdc, 0xc5, 0x30, 0x1b, 0x21, 0x19, 0x3c, 0xa4, 0x30,\n\t0xa8, 0x58, 0x27, 0xe1, 0x8a, 0xc5, 0xac, 0x11, 0x01, 0xcb, 0x18, 0xda,\n\t0x13, 0x46, 0x71, 0x23, 0x8f, 0xc6, 0x0f, 0x8c, 0x81, 0x55, 0xa4, 0x5e,\n\t0x80, 0x1d, 0x84, 0xd1, 0x3e, 0x22, 0xaa, 0x5e, 0xfb, 0x1d, 0x02, 0xb6,\n\t0x59, 0x81, 0x6d, 0x96, 0x72, 0x9e, 0xd9, 0xc6, 0x8c, 0x25, 0x42, 0xc2,\n\t0x13, 0x0b, 0x03, 0x19, 0x86, 0x57, 0xc5, 0xd6, 0xda, 0xbc, 0xa7, 0x17,\n\t0x84, 0xfb, 0x90, 0xd3, 0x54, 0xd0, 0xdc, 0x0b, 0x80, 0x10, 0x02, 0x04,\n\t0x54, 0xb5, 0x31, 0x7b, 0x81, 0x6b, 0xaf, 0x00, 0x10, 0x64, 0xb2, 0xb3,\n\t0xce, 0x2c, 0xac, 0xcb, 0xb9, 0x67, 0x98, 0xcd, 0x80, 0x50, 0xc4, 0xe9,\n\t0x34, 0xfb, 0xf3, 0x9a, 0xc8, 0x58, 0x6b, 0x2f, 0x08, 0xa3, 0x11, 0xef,\n\t0x4a, 0xbd, 0x4a, 0x03, 0x47, 0xdd, 0x80, 0x0c, 0x08, 0x3f, 0x08, 0xf7,\n\t0x68, 0x6d, 0x6e, 0x45, 0x06, 0x01, 0x00, 0x00, 0xdb, 0x2c, 0x40, 0x99,\n\t0x16, 0x72, 0xda, 0xe7, 0xcc, 0x98, 0x63, 0xcb, 0x6e, 0x56, 0x0f, 0x00,\n\t0x04, 0x94, 0xc3, 0x38, 0xde, 0x8b, 0xf4, 0x28, 0xb7, 0xcd, 0x3a, 0xaa,\n\t0x7d, 0xb3, 0x18, 0x07, 0x01, 0x40, 0x10, 0x46, 0x4e, 0x14, 0xeb, 0x7e,\n\t0xcd, 0xfd, 0xab, 0x22, 0x60, 0xe0, 0xc5, 0x27, 0xb2, 0x2a, 0x34, 0xc3,\n\t0x0c, 0x89, 0x80, 0x2d, 0x7b, 0xb0, 0xb1, 0x82, 0x4c, 0x03, 0x0c, 0xd1,\n\t0x64, 0x18, 0xe9, 0xdb, 0x90, 0x6c, 0x5c, 0xcb, 0x1d, 0xb9, 0x15, 0x80,\n\t0xc1, 0xfd, 0xff, 0x41, 0x18, 0xbb, 0xc6, 0x98, 0x8c, 0x13, 0x7a, 0x03,\n\t0x4f, 0x9f, 0xca, 0xac, 0xd0, 0x0c, 0x33, 0x1c, 0x02, 0x36, 0x9a, 0x70,\n\t0xb1, 0x98, 0x69, 0x8a, 0x49, 0x80, 0x1d, 0x86, 0x51, 0x19, 0x80, 0x0a,\n\t0xe3, 0xf8, 0xda, 0x1f, 0xbf, 0xcd, 0xe4, 0x56, 0x00, 0x06, 0x70, 0x23,\n\t0x1d, 0xdf, 0x4c, 0xc0, 0x34, 0x32, 0x48, 0xae, 0xa4, 0x10, 0x8e, 0x3e,\n\t0x87, 0x9c, 0x66, 0x5e, 0x32, 0x3b, 0x00, 0x29, 0x62, 0x28, 0x11, 0x22,\n\t0xe3, 0x14, 0x53, 0x69, 0xa2, 0xc9, 0x30, 0x8a, 0xab, 0x8e, 0x95, 0xbf,\n\t0x55, 0xf7, 0x71, 0x10, 0x00, 0x27, 0x8c, 0xe2, 0x03, 0x94, 0x9c, 0xc6,\n\t0x73, 0x0d, 0x04, 0x2c, 0xb3, 0x0a, 0xcb, 0x2c, 0x83, 0xe7, 0xff, 0xcc,\n\t0x96, 0x21, 0x04, 0x5c, 0xb1, 0x9c, 0x75, 0x9a, 0x69, 0x93, 0x31, 0xef,\n\t0x09, 0xa2, 0xf8, 0x00, 0x72, 0x68, 0x6f, 0xb9, 0x6b, 0xd0, 0x20, 0x61,\n\t0x94, 0x54, 0x54, 0x8a, 0x22, 0x9d, 0xe9, 0x70, 0x0e, 0x82, 0x80, 0x45,\n\t0x2b, 0xb0, 0xcc, 0x0a, 0xaf, 0x00, 0x30, 0x5b, 0x08, 0xa1, 0x80, 0x33,\n\t0x97, 0xd5, 0x6b, 0xbc, 0x0a, 0x32, 0x36, 0xa6, 0x14, 0x46, 0xd1, 0x10,\n\t0x29, 0xec, 0xdb, 0x47, 0xae, 0x05, 0xe0, 0x03, 0xbf, 0xf9, 0xbb, 0x88,\n\t0xb5, 0x2e, 0x69, 0x32, 0x13, 0x40, 0x96, 0x14, 0x3e, 0x01, 0x4b, 0x2f,\n\t0xc1, 0x32, 0x6b, 0x60, 0x0f, 0x80, 0xd9, 0x4a, 0x1c, 0xd9, 0x5a, 0x57,\n\t0xbd, 0xf8, 0xdd, 0x31, 0x86, 0x54, 0x18, 0x69, 0x1b, 0xe9, 0x43, 
0x99,\n\t0xa7, 0x95, 0x80, 0x5c, 0x0b, 0xc0, 0x8b, 0x7f, 0xf5, 0x3f, 0x45, 0x10,\n\t0x46, 0xfb, 0x8c, 0xa1, 0x5b, 0x91, 0x54, 0xf8, 0xbd, 0x2a, 0x02, 0x04,\n\t0x57, 0xcf, 0x41, 0x20, 0x1a, 0x75, 0xd3, 0x99, 0x1d, 0x8e, 0x92, 0x1a,\n\t0x12, 0x01, 0x32, 0x0e, 0x34, 0xf5, 0x58, 0xeb, 0x9b, 0x90, 0xec, 0x09,\n\t0xc8, 0x15, 0xb9, 0x16, 0x00, 0x00, 0x08, 0x23, 0x5d, 0x88, 0xb5, 0xae,\n\t0x66, 0x6b, 0xab, 0x81, 0x63, 0xce, 0x82, 0x03, 0x80, 0xcc, 0xd6, 0x22,\n\t0x60, 0x61, 0x0d, 0x2e, 0xe6, 0xb3, 0xad, 0x04, 0x10, 0x55, 0xa3, 0x38,\n\t0x3e, 0x8c, 0xa1, 0x76, 0xb2, 0x6e, 0x0f, 0xb9, 0x14, 0x80, 0xc1, 0x25,\n\t0xc0, 0x30, 0x8e, 0x2d, 0x32, 0x94, 0xb1, 0x9d, 0xc9, 0x0e, 0x40, 0x5e,\n\t0x02, 0x64, 0xb6, 0x16, 0x01, 0x0b, 0x3d, 0xd8, 0xc8, 0x3c, 0xd5, 0x94,\n\t0x61, 0x14, 0x5b, 0x00, 0x44, 0xac, 0xf3, 0xf5, 0x6c, 0xe6, 0x52, 0x00,\n\t0x06, 0x70, 0xb5, 0xd6, 0x37, 0x02, 0xc8, 0x54, 0x06, 0x4c, 0x52, 0x08,\n\t0x5b, 0x2f, 0x8e, 0xba, 0xcd, 0xcc, 0x2e, 0x40, 0x08, 0x82, 0x14, 0x99,\n\t0x93, 0xcd, 0x84, 0xd6, 0x46, 0x69, 0x63, 0xd4, 0x99, 0xf3, 0xf9, 0x7a,\n\t0x3e, 0x73, 0x2f, 0x00, 0x51, 0xac, 0x8f, 0x50, 0x22, 0x00, 0xd7, 0x20,\n\t0x59, 0x02, 0x54, 0xd4, 0x04, 0x07, 0x00, 0x99, 0x2d, 0x47, 0x08, 0xd8,\n\t0xa2, 0x95, 0x75, 0x29, 0x50, 0x11, 0xd1, 0x01, 0x3f, 0x88, 0xa6, 0x6f,\n\t0xdc, 0x3f, 0x93, 0xab, 0x87, 0x33, 0xb7, 0x02, 0x90, 0x4e, 0x02, 0x44,\n\t0x18, 0xc5, 0x36, 0xd1, 0xb5, 0xa7, 0x00, 0xc9, 0x12, 0xe0, 0x2a, 0x2c,\n\t0xb3, 0xc6, 0x4b, 0x80, 0xcc, 0x36, 0x40, 0x28, 0x60, 0x2e, 0xeb, 0x52,\n\t0xa0, 0x65, 0x88, 0x6e, 0x08, 0xa3, 0x38, 0x77, 0xbb, 0x02, 0x73, 0x2b,\n\t0x00, 0xad, 0x4e, 0x0f, 0x00, 0x64, 0xac, 0x4d, 0x52, 0xb3, 0xeb, 0x9a,\n\t0x08, 0x28, 0xb3, 0x06, 0x45, 0xad, 0x51, 0x37, 0x9d, 0xd9, 0x25, 0x38,\n\t0xa2, 0x99, 0x35, 0xde, 0x24, 0xb4, 0x31, 0x76, 0xa4, 0xb5, 0x05, 0x16,\n\t0x80, 0x6c, 0x54, 0x4b, 0x05, 0x84, 0x51, 0x3c, 0x41, 0x44, 0xfb, 0x91,\n\t0xb1, 0x72, 0x91, 0x6d, 0x96, 0x2f, 0x56, 0xf4, 0x65, 0x98, 0xad, 0x46,\n\t0x89, 0x08, 0x59, 0x57, 0x9c, 0x88, 0x20, 0xe2, 0x58, 0xcb, 0x4b, 0x7f,\n\t0xcf, 0xc7, 0x4a, 0x55, 0x6e, 0x05, 0x00, 0x89, 0xfb, 0xbf, 0xc7, 0x10,\n\t0xdd, 0x84, 0x74, 0x2b, 0xe5, 0x55, 0x5f, 0x0c, 0x03, 0xdb, 0xcc, 0x83,\n\t0x97, 0x00, 0x99, 0xed, 0x41, 0x40, 0x4a, 0x0d, 0x0b, 0x5d, 0x64, 0x1c,\n\t0x70, 0xca, 0x5a, 0xeb, 0x83, 0xc8, 0xd9, 0x52, 0x60, 0xae, 0x05, 0x20,\n\t0x8a, 0xb5, 0xa5, 0xb5, 0x76, 0x90, 0xa9, 0x87, 0x4d, 0xba, 0x07, 0x80,\n\t0x05, 0x80, 0xd9, 0x1e, 0x2c, 0x74, 0xe0, 0x60, 0x29, 0x6b, 0x2e, 0x40,\n\t0x23, 0xd6, 0xe6, 0x16, 0xe4, 0x6c, 0x5b, 0x70, 0x9e, 0x05, 0x00, 0x91,\n\t0xd6, 0x30, 0x26, 0x9b, 0x41, 0x0b, 0x18, 0x38, 0x7a, 0x9e, 0x9d, 0x7f,\n\t0x66, 0x9b, 0x48, 0xce, 0x8c, 0xb4, 0xd0, 0xc9, 0xfa, 0x06, 0x15, 0xc6,\n\t0xb1, 0x87, 0x9c, 0xd9, 0x5c, 0xae, 0x1a, 0x03, 0x5c, 0x3e, 0x37, 0x8a,\n\t0x63, 0x2d, 0x69, 0xb0, 0x78, 0xfb, 0x55, 0xdf, 0x68, 0x60, 0x99, 0x25,\n\t0xb0, 0x07, 0xc0, 0x6c, 0x17, 0x52, 0xc4, 0x90, 0xd9, 0xb7, 0x05, 0x43,\n\t0xa7, 0x49, 0x40, 0x51, 0x9c, 0x9f, 0x62, 0x35, 0xb9, 0x13, 0x80, 0x01,\n\t0x6c, 0x63, 0xcc, 0x3e, 0x00, 0x95, 0x6b, 0xbf, 0x54, 0x40, 0x51, 0x3b,\n\t0x2d, 0x03, 0xc6, 0x30, 0xdb, 0x84, 0x10, 0x50, 0xc2, 0x60, 0x88, 0x83,\n\t0x42, 0x84, 0xd6, 0x46, 0x2c, 0x2c, 0xaf, 0x8d, 0xba, 0xe5, 0x17, 0xc9,\n\t0xb3, 0x00, 0x14, 0xb4, 0x31, 0xb7, 0x02, 0x98, 0xba, 0xf6, 0x4b, 0x05,\n\t0x14, 0xb5, 0xa0, 0xa8, 0xcb, 0x39, 0x00, 0xcc, 0xb6, 0xe2, 0x60, 0x29,\n\t0x73, 0x32, 0x10, 0x08, 0xd3, 0x41, 0x14, 0x35, 0x66, 0x67, 0x1a, 0xb9,\n\t0x79, 0x48, 0xf3, 0x2c, 0x00, 0x2a, 0x8a, 0x75, 0x91, 0x88, 0xae, 0xb9,\n\t0x04, 0x48, 0x00, 
0x94, 0x69, 0x43, 0x52, 0x0f, 0xbc, 0x04, 0xc8, 0x6c,\n\t0x27, 0x0e, 0x16, 0x33, 0x27, 0x03, 0x11, 0xe8, 0xa6, 0x28, 0xd6, 0x07,\n\t0x91, 0xa3, 0x87, 0x34, 0x97, 0x02, 0xa0, 0x8d, 0x01, 0x00, 0x8a, 0x75,\n\t0xd6, 0xb9, 0x92, 0x80, 0xa2, 0x4e, 0x2a, 0x00, 0x0c, 0xb3, 0x7d, 0x58,\n\t0xe8, 0x64, 0x4e, 0x06, 0x32, 0xc6, 0x78, 0x71, 0xac, 0x73, 0x55, 0x22,\n\t0x3c, 0x97, 0x02, 0xd0, 0xee, 0x24, 0xc7, 0xa8, 0x19, 0x93, 0x35, 0x5b,\n\t0x42, 0x40, 0x52, 0x97, 0x93, 0x80, 0x98, 0x6d, 0x27, 0xa9, 0x0d, 0x98,\n\t0xed, 0x31, 0xd5, 0x86, 0x10, 0x65, 0x1e, 0xd4, 0xb6, 0x87, 0x5c, 0x0a,\n\t0xc0, 0xc9, 0x0b, 0x8b, 0x88, 0xb5, 0xb6, 0x88, 0x28, 0x63, 0x0e, 0x00,\n\t0x41, 0x99, 0x76, 0x56, 0x57, 0x8c, 0x61, 0x36, 0x0d, 0x29, 0x87, 0x8b,\n\t0x3a, 0x69, 0xde, 0x0e, 0x7c, 0x6d, 0xee, 0xba, 0xf9, 0x90, 0x88, 0x62,\n\t0xbd, 0x17, 0x42, 0x1c, 0x41, 0xc6, 0x2c, 0x40, 0x8b, 0x56, 0x47, 0xdd,\n\t0x6c, 0x66, 0xd7, 0x91, 0x1c, 0x1e, 0x2a, 0x11, 0x21, 0x9b, 0xe7, 0x49,\n\t0x22, 0x9d, 0xde, 0xe6, 0x86, 0x5c, 0x0a, 0x00, 0x92, 0xfd, 0xd3, 0x93,\n\t0xd9, 0xf7, 0x01, 0x18, 0x58, 0x66, 0x05, 0x9c, 0x03, 0xc0, 0x6c, 0x37,\n\t0x12, 0x3e, 0x2c, 0x34, 0xb3, 0xbe, 0xdc, 0x23, 0xc2, 0x14, 0x72, 0x74,\n\t0x4a, 0x50, 0x5e, 0x05, 0x00, 0xda, 0x18, 0x99, 0x1e, 0x06, 0x92, 0x6d,\n\t0x0a, 0xc0, 0xbb, 0x00, 0x99, 0x11, 0x20, 0x11, 0xc2, 0x42, 0x27, 0xdb,\n\t0xf2, 0x33, 0xa1, 0x01, 0xe0, 0x4e, 0x00, 0xe5, 0x51, 0xb7, 0xfb, 0x52,\n\t0xfb, 0x73, 0x8a, 0xd6, 0x06, 0x59, 0x63, 0x80, 0x02, 0x04, 0x65, 0x3a,\n\t0x1c, 0xfe, 0x63, 0xb6, 0x19, 0x01, 0x89, 0x18, 0x12, 0x7e, 0xa6, 0x57,\n\t0x13, 0xe0, 0xe8, 0xa4, 0xbe, 0x65, 0x6e, 0xce, 0x0a, 0xcf, 0xad, 0x00,\n\t0xc4, 0x5a, 0x83, 0x32, 0xee, 0x03, 0x00, 0x11, 0x14, 0x75, 0xc0, 0x53,\n\t0x00, 0x66, 0xbb, 0x11, 0xd0, 0x43, 0xc4, 0x00, 0x00, 0xae, 0x09, 0x98,\n\t0x11, 0x63, 0x68, 0x08, 0x73, 0x36, 0xa9, 0x00, 0x30, 0xcc, 0xf6, 0x22,\n\t0x04, 0x41, 0x0e, 0xb1, 0xfa, 0x14, 0x1b, 0x5e, 0x06, 0xcc, 0xc4, 0x30,\n\t0xd1, 0x52, 0x81, 0x98, 0x93, 0x80, 0x98, 0x11, 0x41, 0x10, 0x22, 0xfb,\n\t0x50, 0xd5, 0x0f, 0xfc, 0x99, 0x9c, 0xac, 0x06, 0xe4, 0x56, 0x00, 0x0c,\n\t0x65, 0xed, 0x20, 0x01, 0x49, 0x01, 0x04, 0xf1, 0x61, 0x20, 0xcc, 0x08,\n\t0x10, 0x80, 0x10, 0x43, 0x44, 0x9f, 0x52, 0xad, 0xe8, 0xf8, 0xe1, 0xa8,\n\t0x5b, 0x0e, 0x20, 0x67, 0x02, 0x30, 0xb0, 0x2c, 0xa2, 0x40, 0xa8, 0x22,\n\t0xc3, 0x69, 0x40, 0x00, 0x20, 0x28, 0x4a, 0x37, 0x64, 0x70, 0x18, 0x90,\n\t0xd9, 0x6e, 0x44, 0xea, 0x01, 0x64, 0x2c, 0x0d, 0x86, 0xc4, 0xbb, 0xed,\n\t0xfa, 0xc1, 0xa8, 0x1b, 0x0e, 0x20, 0x67, 0x02, 0x30, 0x40, 0x51, 0x48,\n\t0x71, 0x07, 0x32, 0xed, 0x04, 0x4c, 0x96, 0x62, 0x04, 0x71, 0x16, 0x20,\n\t0x33, 0x1a, 0x14, 0xba, 0x99, 0x0f, 0xa3, 0x21, 0x22, 0x68, 0x6d, 0xd0,\n\t0x65, 0x0f, 0xe0, 0xaa, 0xd8, 0x64, 0x30, 0x85, 0x4c, 0x67, 0xa9, 0x89,\n\t0xd4, 0x03, 0x88, 0x79, 0x0d, 0x80, 0x19, 0x09, 0x0a, 0x5d, 0x20, 0xfb,\n\t0x69, 0x54, 0x52, 0x1b, 0x23, 0xce, 0x2d, 0x2d, 0x8f, 0xba, 0xd9, 0x49,\n\t0x63, 0x46, 0xdd, 0x80, 0x77, 0x43, 0x53, 0xf6, 0x42, 0x0b, 0xc9, 0x1e,\n\t0x80, 0x7c, 0x45, 0x57, 0x99, 0xdd, 0x83, 0xa2, 0x5e, 0x56, 0x0f, 0x40,\n\t0x01, 0xd8, 0x67, 0x0c, 0x4d, 0x7f, 0xe4, 0xe8, 0xad, 0xb9, 0x98, 0xaf,\n\t0xe6, 0x56, 0x00, 0x88, 0x28, 0x93, 0xfd, 0x13, 0xd2, 0x18, 0x00, 0x71,\n\t0x0c, 0x80, 0x19, 0x0d, 0x12, 0x99, 0x77, 0x04, 0x2a, 0x21, 0xc4, 0x8d,\n\t0x44, 0x74, 0x00, 0x39, 0xb1, 0xbd, 0x5c, 0x34, 0xe2, 0x4a, 0x10, 0x65,\n\t0xcf, 0x03, 0x10, 0x88, 0xb3, 0x56, 0x65, 0x61, 0x98, 0x4d, 0x27, 0x79,\n\t0xfe, 0x32, 0x3d, 0xad, 0x02, 0x80, 0x43, 0x9c, 0x09, 0x78, 0x6d, 0xb2,\n\t0xef, 0x93, 0x10, 0x48, 0xd4, 0x97, 0x23, 0x00, 
0xcc, 0x68, 0x18, 0x6a,\n\t0xf0, 0x21, 0xca, 0xc5, 0x26, 0xa0, 0x3e, 0x39, 0x16, 0x80, 0xec, 0x9d,\n\t0x94, 0xcc, 0xbf, 0xf2, 0xd3, 0xa9, 0xcc, 0xee, 0x62, 0x98, 0xe7, 0x2f,\n\t0x6f, 0x4f, 0x69, 0x6e, 0x05, 0x60, 0x28, 0xc8, 0x40, 0x64, 0x4e, 0x1c,\n\t0x62, 0x98, 0xcd, 0x66, 0x38, 0xb3, 0x66, 0x0f, 0x20, 0x03, 0xec, 0x01,\n\t0x30, 0xe3, 0xc2, 0x50, 0x1e, 0x00, 0x4f, 0x01, 0xb2, 0x31, 0x5c, 0x17,\n\t0xb1, 0x00, 0x30, 0x23, 0x64, 0x88, 0xbd, 0x00, 0xc0, 0x30, 0xf1, 0xad,\n\t0xad, 0x27, 0xb7, 0x02, 0x30, 0xdc, 0x82, 0x9e, 0xca, 0xf3, 0xa5, 0x30,\n\t0xcc, 0x65, 0x0c, 0xb3, 0x75, 0x60, 0xab, 0xc9, 0xad, 0xd5, 0x0c, 0xb3,\n\t0xc1, 0x82, 0x32, 0x9e, 0x1e, 0xc6, 0x30, 0x5b, 0x43, 0xc6, 0xc2, 0x55,\n\t0x00, 0x44, 0xfa, 0x5f, 0x5e, 0xc8, 0xad, 0x00, 0x0c, 0x85, 0x50, 0xa0,\n\t0x3c, 0xc9, 0x2a, 0xb3, 0xab, 0x20, 0x1a, 0xe2, 0xd9, 0x1b, 0x76, 0xf7,\n\t0xe0, 0x16, 0x93, 0x5b, 0x01, 0x18, 0x6e, 0x87, 0x25, 0x7b, 0x00, 0xcc,\n\t0xe8, 0xa0, 0x21, 0x3c, 0x00, 0x60, 0xb8, 0x97, 0x6e, 0x35, 0x39, 0x16,\n\t0x80, 0xac, 0xbd, 0x44, 0x48, 0x76, 0x0f, 0xe7, 0xf6, 0x52, 0x98, 0x1d,\n\t0x4e, 0x22, 0x00, 0xd9, 0x10, 0x42, 0xb0, 0x07, 0x90, 0x95, 0xec, 0x12,\n\t0xc0, 0x1e, 0x00, 0x33, 0x3a, 0x08, 0x6a, 0x98, 0x43, 0x69, 0x63, 0x91,\n\t0xa3, 0x9d, 0x6b, 0xb9, 0x15, 0x00, 0x29, 0x64, 0x26, 0x9b, 0x16, 0x00,\n\t0x48, 0x58, 0xa9, 0x0a, 0xe7, 0x68, 0x7d, 0x85, 0xd9, 0x35, 0x0c, 0x31,\n\t0x05, 0x88, 0x41, 0x74, 0x4a, 0x08, 0xcc, 0x61, 0x88, 0xfd, 0xc3, 0x5b,\n\t0x49, 0x7e, 0x05, 0x40, 0x66, 0x1f, 0xd5, 0x09, 0x2e, 0x20, 0x72, 0xb3,\n\t0xbf, 0x82, 0xd9, 0x65, 0xc4, 0x28, 0x66, 0x9d, 0x82, 0x6a, 0x02, 0xce,\n\t0x0a, 0x21, 0xce, 0x7f, 0xf7, 0xef, 0x7f, 0x9e, 0x8b, 0xd1, 0x2a, 0xaf,\n\t0x02, 0x10, 0x4b, 0x29, 0x56, 0x01, 0x64, 0xa8, 0x9b, 0x44, 0x30, 0xc2,\n\t0x86, 0x81, 0xc5, 0x93, 0x00, 0x66, 0x24, 0xc4, 0xa2, 0x06, 0x64, 0x8f,\n\t0x03, 0xc4, 0x96, 0x52, 0xfa, 0xbd, 0x87, 0x0f, 0x8d, 0xba, 0xd9, 0x00,\n\t0xf2, 0x2b, 0x00, 0x5d, 0x22, 0xbc, 0x02, 0x60, 0x31, 0xcb, 0x8b, 0x49,\n\t0x78, 0x20, 0x61, 0x83, 0xa7, 0x00, 0xcc, 0x28, 0xd0, 0xf0, 0x32, 0x07,\n\t0xa1, 0x85, 0x10, 0x50, 0x4a, 0x8a, 0xa2, 0x97, 0xa9, 0xdc, 0xe5, 0x96,\n\t0x93, 0x2b, 0x01, 0x18, 0x88, 0x8e, 0xc6, 0x00, 0x56, 0x00, 0x64, 0xaa,\n\t0xf5, 0x6d, 0x60, 0x83, 0xb2, 0x1c, 0x21, 0xc8, 0x30, 0x9b, 0x0d, 0xd1,\n\t0x50, 0xb9, 0xbd, 0x02, 0x80, 0x92, 0x12, 0xa5, 0x22, 0x0b, 0xc0, 0xd5,\n\t0x1b, 0x26, 0xb3, 0x26, 0x58, 0x13, 0x48, 0x38, 0xa9, 0x07, 0xc0, 0x30,\n\t0xdb, 0xcf, 0x50, 0xb9, 0xfd, 0xe9, 0x20, 0x57, 0x70, 0xf2, 0xf1, 0xbc,\n\t0xe6, 0x56, 0x00, 0x94, 0xcc, 0xde, 0x34, 0x82, 0x82, 0x11, 0x19, 0xea,\n\t0x87, 0x32, 0xcc, 0x16, 0x30, 0x4c, 0x26, 0xa0, 0x92, 0xc9, 0x6b, 0xe5,\n\t0x10, 0xcf, 0xf7, 0x56, 0x92, 0x8f, 0x56, 0x5c, 0x01, 0x25, 0x65, 0xf6,\n\t0x6c, 0x40, 0x21, 0x10, 0x8b, 0xca, 0xa8, 0x9b, 0xcc, 0xec, 0x42, 0x88,\n\t0x14, 0x0c, 0xb2, 0xc7, 0x9f, 0x94, 0xca, 0x97, 0xc9, 0xe5, 0xab, 0x35,\n\t0x03, 0x58, 0x4a, 0x25, 0xb9, 0x00, 0x19, 0x20, 0x48, 0xc4, 0xb2, 0x3e,\n\t0x4c, 0x32, 0x06, 0xc3, 0x6c, 0x0a, 0x06, 0x36, 0x74, 0x96, 0xea, 0xf5,\n\t0x09, 0x64, 0x29, 0x95, 0xab, 0xbd, 0xeb, 0x39, 0x16, 0x00, 0x79, 0xd1,\n\t0x5d, 0xca, 0x72, 0x19, 0x5a, 0xd6, 0x46, 0xdd, 0x64, 0x66, 0xd7, 0x41,\n\t0x30, 0x70, 0x11, 0xa1, 0x9a, 0xa9, 0x28, 0xa8, 0x10, 0xe8, 0x28, 0x29,\n\t0xcf, 0x00, 0x19, 0xcf, 0x13, 0xdf, 0x06, 0xf2, 0x2a, 0x00, 0xa4, 0x94,\n\t0x0a, 0xa5, 0x94, 0x3d, 0x64, 0xca, 0x98, 0x12, 0xe9, 0x5a, 0x2c, 0x7b,\n\t0x00, 0xcc, 0xf6, 0xa2, 0xe1, 0x21, 0x42, 0x3d, 0xeb, 0xcb, 0x57, 0x05,\n\t0x70, 0x0c, 0x40, 0x6e, 0x8e, 0xb2, 0xce, 0xad, 0x00, 0x58, 0x4a, 
0xce,\n\t0x01, 0x78, 0x03, 0xc0, 0x35, 0x4f, 0xfd, 0xec, 0x4f, 0x01, 0x58, 0x00,\n\t0x98, 0xed, 0x85, 0x92, 0x55, 0x40, 0x58, 0xc8, 0xe6, 0xd5, 0x8b, 0x58,\n\t0x4a, 0xe9, 0x23, 0x1d, 0xd4, 0xf2, 0xb0, 0x29, 0x28, 0x97, 0x02, 0xb0,\n\t0xbc, 0xd6, 0x86, 0x63, 0x5b, 0xab, 0x00, 0xe6, 0x81, 0x6c, 0x87, 0xaf,\n\t0x6b, 0x59, 0xe1, 0x5c, 0x00, 0x66, 0xdb, 0x31, 0x26, 0xe3, 0x89, 0x00,\n\t0x29, 0x1c, 0x04, 0xcc, 0x40, 0xb5, 0x5c, 0x04, 0x00, 0x92, 0x52, 0x66,\n\t0xee, 0x5b, 0x23, 0x4a, 0x30, 0xc2, 0x43, 0x8e, 0xe2, 0x2b, 0xcc, 0x2e,\n\t0x20, 0xa6, 0x02, 0xb2, 0x9a, 0x91, 0x94, 0x82, 0x2c, 0xa5, 0x72, 0xf5,\n\t0x80, 0xe6, 0x52, 0x00, 0xac, 0x74, 0x8d, 0xd4, 0xb6, 0xb2, 0x75, 0x96,\n\t0x00, 0x21, 0x96, 0x35, 0x68, 0x59, 0x02, 0x0b, 0x00, 0xb3, 0x9d, 0x84,\n\t0x98, 0xcc, 0x5a, 0x0f, 0x40, 0x2b, 0x29, 0x17, 0x1d, 0xdb, 0x5a, 0x41,\n\t0x8e, 0x1e, 0xd2, 0x5c, 0x0a, 0x40, 0x4a, 0x64, 0x5b, 0xea, 0x82, 0x10,\n\t0x22, 0x43, 0xc0, 0x24, 0x15, 0x00, 0x51, 0xe1, 0x28, 0x00, 0xb3, 0xad,\n\t0xf8, 0xd8, 0x97, 0xe6, 0x01, 0x5c, 0x93, 0x58, 0x00, 0x6f, 0x39, 0xb6,\n\t0x75, 0x5a, 0x08, 0x91, 0x8b, 0xad, 0xc0, 0x40, 0xbe, 0x05, 0xa0, 0xa7,\n\t0xa4, 0x7c, 0x0d, 0xc0, 0xc2, 0xb5, 0x5f, 0x4a, 0xd0, 0xa2, 0x0a, 0x2d,\n\t0x2a, 0xc8, 0x91, 0xb8, 0x32, 0x3b, 0x1d, 0x22, 0x68, 0xb2, 0x91, 0x31,\n\t0xf8, 0x4c, 0x42, 0x88, 0x9e, 0xe7, 0xd8, 0xc1, 0x6a, 0xab, 0x3b, 0xea,\n\t0x96, 0x5f, 0x24, 0x77, 0x02, 0x30, 0x10, 0x19, 0x35, 0x96, 0x52, 0x6d,\n\t0x20, 0xdb, 0xd1, 0xab, 0x24, 0x6c, 0x44, 0x6a, 0x6a, 0xd4, 0xcd, 0x67,\n\t0x76, 0x11, 0x86, 0x2c, 0x68, 0x72, 0x90, 0x3d, 0x0b, 0x30, 0x99, 0x2a,\n\t0x54, 0xcb, 0xf9, 0x49, 0x5b, 0xcf, 0x9d, 0x00, 0x0c, 0x62, 0xdb, 0x2a,\n\t0x2d, 0x0c, 0x72, 0x6d, 0x08, 0x12, 0x91, 0xdc, 0xc3, 0xd9, 0x80, 0xcc,\n\t0x36, 0x41, 0xd0, 0x28, 0x20, 0xa4, 0xcc, 0x09, 0x68, 0xda, 0xb1, 0x55,\n\t0x08, 0xc0, 0xe4, 0xe9, 0x09, 0xcd, 0xb3, 0x00, 0x90, 0x63, 0x59, 0x6d,\n\t0x4b, 0xca, 0x65, 0x64, 0x4a, 0x06, 0x52, 0x08, 0xd4, 0x2c, 0x38, 0x17,\n\t0x80, 0xd9, 0x1e, 0x08, 0x31, 0x8a, 0x08, 0xc5, 0x64, 0xc6, 0x2c, 0x40,\n\t0xd1, 0xb4, 0x94, 0x7a, 0x03, 0x19, 0xb7, 0xb8, 0x6f, 0x17, 0xb9, 0x15,\n\t0x80, 0xff, 0xfe, 0x95, 0xaf, 0x93, 0xe7, 0xd8, 0x73, 0x52, 0xca, 0x63,\n\t0xc8, 0x50, 0x19, 0x88, 0x20, 0x10, 0xaa, 0xfd, 0x20, 0x38, 0xa3, 0x6e,\n\t0x3a, 0xb3, 0x4b, 0xd0, 0xda, 0x82, 0x81, 0x8b, 0x8c, 0x53, 0x80, 0x35,\n\t0xcb, 0x52, 0x27, 0xc1, 0x02, 0x90, 0x8d, 0xff, 0xfc, 0xaf, 0x7e, 0x05,\n\t0x96, 0xa5, 0x7a, 0x52, 0x8a, 0x16, 0x32, 0x79, 0x00, 0x84, 0x48, 0x4d,\n\t0x21, 0x96, 0x35, 0x70, 0x20, 0x90, 0xd9, 0x0e, 0x02, 0x6a, 0x64, 0xae,\n\t0x04, 0x24, 0xa5, 0x20, 0xd7, 0xb2, 0x2e, 0x26, 0xb5, 0xe5, 0x21, 0x0b,\n\t0x10, 0xc8, 0xb1, 0x00, 0x38, 0xb6, 0x05, 0x00, 0xe4, 0xda, 0x76, 0x2c,\n\t0x32, 0x08, 0x80, 0x00, 0x21, 0x92, 0x93, 0x88, 0x65, 0x23, 0x93, 0x4b,\n\t0xc6, 0x30, 0xd7, 0x4b, 0x17, 0x87, 0xb2, 0x2e, 0x01, 0x1a, 0x4b, 0xa9,\n\t0xa6, 0xeb, 0xd8, 0x1d, 0xe4, 0x6c, 0x74, 0xca, 0xad, 0x00, 0xa4, 0x04,\n\t0xb6, 0x6d, 0x1d, 0x87, 0x10, 0x2b, 0xd7, 0x7e, 0x29, 0x21, 0x16, 0x75,\n\t0x44, 0x72, 0x0a, 0x39, 0xeb, 0x63, 0x66, 0x07, 0x42, 0x04, 0x44, 0x54,\n\t0x42, 0x46, 0x13, 0x8a, 0x84, 0x10, 0x6f, 0x78, 0x8e, 0x7d, 0x1a, 0x39,\n\t0x29, 0x07, 0xde, 0x27, 0xff, 0x02, 0x60, 0xa9, 0xe3, 0x02, 0x58, 0xce,\n\t0xf2, 0x62, 0x12, 0x16, 0x02, 0xeb, 0x20, 0xaf, 0x04, 0x30, 0x5b, 0x8e,\n\t0x26, 0x0f, 0x11, 0x95, 0x91, 0x71, 0xb0, 0x31, 0x4a, 0x8a, 0xa6, 0x65,\n\t0xa9, 0x8e, 0xd6, 0xb9, 0xb2, 0xff, 0x7c, 0x0a, 0xc0, 0xc0, 0xfc, 0x88,\n\t0x3c, 0xc7, 0x0e, 0x94, 0x92, 0x99, 0x72, 0x01, 0x00, 0x05, 0x5f, 0x1d,\n\t0xce, 0xeb, 0x65, 
0x31, 0x3b, 0x06, 0x42, 0x84, 0x0a, 0x02, 0xec, 0xc9,\n\t0xb6, 0x02, 0x00, 0x84, 0xae, 0x6d, 0x37, 0x01, 0xc4, 0xbc, 0x19, 0x68,\n\t0x38, 0xc8, 0x73, 0xec, 0x65, 0x4b, 0xa9, 0xd3, 0xc8, 0xb4, 0x2d, 0x58,\n\t0xc0, 0xb7, 0x6e, 0x80, 0x11, 0xf9, 0xa8, 0xb8, 0xca, 0xec, 0x54, 0x08,\n\t0x91, 0x2e, 0x20, 0x46, 0x11, 0x99, 0xc6, 0x25, 0x21, 0x96, 0x1c, 0xc7,\n\t0x7a, 0x05, 0x39, 0xaa, 0x03, 0xd0, 0x27, 0xd7, 0x02, 0xd0, 0xea, 0xfa,\n\t0x70, 0x1d, 0x7b, 0x5e, 0x29, 0xf9, 0x36, 0x92, 0x8c, 0xc0, 0xab, 0x22,\n\t0x40, 0x08, 0xd5, 0x5e, 0x44, 0x72, 0x06, 0x39, 0x9b, 0x6a, 0x31, 0x3b,\n\t0x8c, 0x1e, 0xed, 0xcb, 0xba, 0x09, 0x88, 0xa4, 0x10, 0x6d, 0xd7, 0xb6,\n\t0x17, 0x90, 0x0e, 0x62, 0x79, 0x59, 0x01, 0x00, 0x72, 0x2e, 0x00, 0x95,\n\t0xa2, 0x07, 0x00, 0xb1, 0x63, 0x5b, 0xbe, 0x10, 0x59, 0x2c, 0xda, 0x20,\n\t0x92, 0x33, 0x88, 0xd4, 0x0c, 0xaf, 0x04, 0x30, 0x5b, 0x07, 0x01, 0x5d,\n\t0x3a, 0x90, 0xb5, 0xfe, 0x84, 0xb6, 0x94, 0x9c, 0xf3, 0x1c, 0x7b, 0x11,\n\t0x39, 0x8c, 0x4e, 0xe7, 0x5a, 0x00, 0x52, 0x22, 0xcf, 0xb1, 0xcf, 0x0a,\n\t0x88, 0xd5, 0x2c, 0x2f, 0x36, 0xa2, 0x00, 0x5f, 0xdd, 0xc8, 0x81, 0x40,\n\t0x66, 0xcb, 0xd0, 0xe4, 0xa4, 0x01, 0xc0, 0x4c, 0x44, 0x52, 0xca, 0xb7,\n\t0xcb, 0x45, 0xef, 0xdc, 0x89, 0x73, 0x8b, 0x2c, 0x00, 0x1b, 0xc0, 0x77,\n\t0x1d, 0xfb, 0x75, 0x21, 0xc4, 0x3c, 0x32, 0x29, 0xa8, 0x44, 0xd7, 0xbe,\n\t0x65, 0x4c, 0x2e, 0x8d, 0x19, 0x3f, 0x0c, 0x42, 0x6a, 0xa0, 0x87, 0xd9,\n\t0xac, 0x5e, 0x66, 0x68, 0x27, 0x35, 0x00, 0x82, 0x1b, 0xf7, 0xe5, 0x6f,\n\t0xb3, 0x5a, 0x6e, 0xad, 0x64, 0x70, 0x57, 0xa0, 0xe7, 0xd8, 0xf3, 0x96,\n\t0xa5, 0x4e, 0x21, 0x73, 0x20, 0xf0, 0x66, 0x68, 0x91, 0x79, 0x89, 0x86,\n\t0x61, 0x86, 0x22, 0xd4, 0x25, 0xe8, 0x8c, 0x01, 0x40, 0x21, 0xc4, 0x92,\n\t0xe7, 0xd8, 0xaf, 0x21, 0x67, 0x29, 0xc0, 0x7d, 0x72, 0x2b, 0x00, 0x7d,\n\t0xce, 0x2f, 0xad, 0xa2, 0xe8, 0xb9, 0x17, 0xd2, 0x8d, 0x14, 0x19, 0x02,\n\t0x81, 0x06, 0x81, 0x3a, 0x80, 0x40, 0xed, 0xe7, 0x38, 0x00, 0xb3, 0x25,\n\t0xb4, 0xe8, 0xf0, 0x30, 0x01, 0xc0, 0x56, 0xc1, 0x75, 0xce, 0x21, 0x7d,\n\t0x76, 0xf3, 0x14, 0x00, 0x04, 0xc6, 0x40, 0x00, 0xf6, 0x4e, 0xd6, 0x01,\n\t0x20, 0xf4, 0x5c, 0x7b, 0x45, 0x08, 0x91, 0xe9, 0xb8, 0xf0, 0x48, 0x4e,\n\t0xc0, 0xb7, 0x8e, 0x8c, 0xba, 0xe9, 0xcc, 0x0e, 0xc4, 0x18, 0x85, 0x1e,\n\t0xcd, 0x66, 0xdd, 0x03, 0xa0, 0x6d, 0x4b, 0x5d, 0x28, 0xb8, 0x4e, 0xae,\n\t0xca, 0x80, 0x0d, 0x92, 0x7b, 0x01, 0x48, 0x09, 0x8a, 0xae, 0xf3, 0xba,\n\t0x10, 0xb8, 0x90, 0xed, 0xe5, 0x16, 0x3a, 0xf6, 0xd1, 0xac, 0x2a, 0xcd,\n\t0x30, 0x19, 0x21, 0x04, 0x34, 0x89, 0x1e, 0xf6, 0x66, 0x9e, 0xff, 0x2b,\n\t0xa5, 0x5e, 0x2f, 0x15, 0xdc, 0xb9, 0xb3, 0xf3, 0x2b, 0x2c, 0x00, 0xd7,\n\t0x41, 0x54, 0x70, 0x9d, 0x33, 0x4a, 0xca, 0x79, 0x64, 0x58, 0xe0, 0x27,\n\t0x08, 0xf4, 0xec, 0x5b, 0xa1, 0x05, 0x17, 0x09, 0x65, 0x36, 0x13, 0x42,\n\t0xa0, 0xab, 0x88, 0x51, 0x45, 0xb6, 0xf9, 0x3f, 0x5a, 0x9e, 0x63, 0x9d,\n\t0x00, 0xd0, 0xd9, 0x3f, 0xd3, 0x18, 0x75, 0xe3, 0xaf, 0x48, 0xae, 0x05,\n\t0x60, 0x30, 0x25, 0xb8, 0xe0, 0x39, 0x17, 0x6c, 0xcb, 0x7a, 0x1d, 0x19,\n\t0x8e, 0x55, 0x4a, 0xe2, 0x00, 0xfb, 0x39, 0x0e, 0xc0, 0x6c, 0x32, 0x02,\n\t0x2d, 0xba, 0x29, 0xeb, 0xfa, 0x3f, 0x09, 0x88, 0x05, 0xcf, 0x75, 0x5e,\n\t0x45, 0x4e, 0x03, 0x80, 0x40, 0xce, 0x05, 0xa0, 0x4f, 0xac, 0x0d, 0x2c,\n\t0xa5, 0x56, 0x5c, 0xdb, 0x3a, 0x8e, 0x4c, 0x9d, 0x99, 0x6c, 0x0d, 0xee,\n\t0xda, 0xb7, 0xb3, 0xf9, 0x33, 0x9b, 0x86, 0x36, 0x16, 0xba, 0x34, 0x9b,\n\t0x35, 0xc7, 0x44, 0x5b, 0x4a, 0xcd, 0x15, 0x3d, 0xf7, 0x3c, 0x00, 0x0d,\n\t0xe4, 0x2f, 0x00, 0x08, 0x8c, 0x89, 0x00, 0x58, 0xc9, 0x06, 0x8a, 0xa0,\n\t0x54, 0xf0, 0x5e, 0x97, 0x52, 0xcc, 0x67, 0x79, 
0x0f, 0xc1, 0x42, 0xcb,\n\t0xf9, 0x10, 0x57, 0x08, 0x62, 0x36, 0x09, 0x03, 0xdf, 0x4c, 0x0f, 0xb9,\n\t0xfe, 0xaf, 0x8e, 0x55, 0x4b, 0x85, 0xb9, 0x17, 0xde, 0x3c, 0x95, 0xdb,\n\t0x71, 0x68, 0x2c, 0x04, 0x20, 0x25, 0x2a, 0x15, 0xdd, 0x53, 0x96, 0x52,\n\t0x17, 0x90, 0x31, 0xd1, 0xbf, 0x67, 0xdd, 0x82, 0x50, 0xcd, 0x80, 0xe3,\n\t0x00, 0xcc, 0xf5, 0x23, 0xd0, 0x31, 0xfb, 0x87, 0x59, 0xff, 0x6f, 0x15,\n\t0x5c, 0xf7, 0x14, 0x80, 0xce, 0x5d, 0x37, 0x1f, 0x1a, 0x75, 0xe3, 0xdf,\n\t0x95, 0xdc, 0x0b, 0xc0, 0x60, 0x1c, 0xa0, 0xe4, 0xb9, 0xf3, 0xae, 0x6d,\n\t0x0f, 0x11, 0x07, 0x98, 0x45, 0xcf, 0xba, 0x0d, 0x99, 0xb6, 0x11, 0x30,\n\t0xcc, 0x55, 0x20, 0x12, 0x58, 0xa3, 0x5b, 0x86, 0x59, 0xff, 0x3f, 0x5b,\n\t0x2a, 0xb8, 0x2f, 0x22, 0xc7, 0xf3, 0x7f, 0x60, 0x0c, 0x04, 0x60, 0x10,\n\t0x21, 0xc4, 0x92, 0xeb, 0x58, 0xcf, 0x8b, 0x4c, 0x15, 0x82, 0x00, 0x23,\n\t0x3c, 0xb4, 0x9c, 0x0f, 0xf0, 0xa1, 0xa1, 0xcc, 0x75, 0x42, 0x08, 0x4c,\n\t0x03, 0x3e, 0x65, 0xde, 0x64, 0x16, 0xda, 0x96, 0x7a, 0xad, 0xe4, 0xb9,\n\t0x6f, 0x23, 0x3d, 0xdc, 0x36, 0x8f, 0xf3, 0x7f, 0x60, 0xcc, 0x04, 0x00,\n\t0x80, 0x5f, 0x2e, 0x7a, 0x2f, 0x4b, 0x29, 0xce, 0x22, 0xa3, 0x5f, 0xdf,\n\t0x76, 0xde, 0x8f, 0x48, 0x4e, 0x66, 0x7d, 0x39, 0xc3, 0x5c, 0x01, 0x42,\n\t0xc7, 0xcc, 0x22, 0x40, 0xe6, 0x72, 0x73, 0xbe, 0xe3, 0xd8, 0x6f, 0x17,\n\t0x3c, 0x67, 0xd1, 0x0f, 0xaf, 0x99, 0xbd, 0x3e, 0x52, 0xc6, 0x42, 0x00,\n\t0x06, 0xd4, 0x53, 0x57, 0x8a, 0x85, 0x79, 0xd7, 0xb6, 0xe6, 0x90, 0xe1,\n\t0xd8, 0x70, 0x01, 0x42, 0xa0, 0x0e, 0xa1, 0x63, 0xdf, 0xc9, 0xd3, 0x00,\n\t0x66, 0xc3, 0x10, 0x49, 0x34, 0xcd, 0x1d, 0x99, 0x13, 0xcb, 0xa4, 0x10,\n\t0x4b, 0xe5, 0x82, 0xfb, 0x06, 0x80, 0x9e, 0xe7, 0x64, 0x2a, 0x1a, 0x3a,\n\t0x32, 0xc6, 0x42, 0x00, 0xfa, 0xac, 0xb5, 0x7b, 0x70, 0x6c, 0x6b, 0xde,\n\t0x73, 0x9c, 0x17, 0x00, 0xb4, 0xaf, 0xfd, 0x0e, 0x82, 0x16, 0x45, 0xb4,\n\t0xdc, 0xfb, 0xb3, 0x56, 0x6f, 0x65, 0x98, 0x75, 0x10, 0x7c, 0x33, 0x89,\n\t0x0e, 0x1d, 0xcc, 0xea, 0xfe, 0x6b, 0xa5, 0xe4, 0xf1, 0x52, 0xc1, 0x7b,\n\t0x15, 0x19, 0xce, 0xb3, 0x18, 0x35, 0x63, 0x25, 0x00, 0xe9, 0x99, 0x6a,\n\t0xad, 0x52, 0xc1, 0x7b, 0x41, 0x49, 0x79, 0x3e, 0xeb, 0xfb, 0x5a, 0xf6,\n\t0x07, 0x10, 0xa9, 0x69, 0xf0, 0x34, 0x80, 0x19, 0x1e, 0x42, 0x47, 0xef,\n\t0x47, 0x88, 0x06, 0x32, 0x3e, 0x3f, 0x5d, 0xd7, 0xb1, 0x9f, 0xaf, 0x96,\n\t0x0a, 0xa7, 0x9f, 0x3d, 0x76, 0x9c, 0x80, 0xfc, 0xce, 0xff, 0x81, 0x31,\n\t0x13, 0x80, 0x94, 0xa8, 0x5a, 0x2e, 0x1c, 0xb3, 0x2d, 0xf5, 0x06, 0x32,\n\t0x6c, 0x0f, 0x4e, 0xca, 0x84, 0x1d, 0x44, 0xc7, 0x3e, 0xca, 0xd3, 0x00,\n\t0x66, 0x68, 0x0c, 0x29, 0xac, 0x52, 0x76, 0xf7, 0x5f, 0x08, 0xb1, 0x50,\n\t0x2a, 0xb8, 0xcf, 0x03, 0x58, 0x7d, 0xdf, 0xad, 0x87, 0x47, 0xdd, 0xfc,\n\t0x6b, 0x32, 0x36, 0x02, 0xd0, 0x57, 0xd1, 0xe5, 0xb5, 0x0e, 0x79, 0x8e,\n\t0x7d, 0xae, 0xe0, 0x39, 0xcf, 0x21, 0xf3, 0x34, 0xa0, 0x80, 0x35, 0xe7,\n\t0xc3, 0xe9, 0x31, 0x4e, 0x0c, 0x93, 0x15, 0x82, 0x6f, 0xa6, 0xd0, 0xa5,\n\t0x03, 0x99, 0xdd, 0x7f, 0x4b, 0xc9, 0xb7, 0xab, 0xa5, 0xe2, 0x2b, 0x18,\n\t0x03, 0xf7, 0x1f, 0x18, 0x23, 0x01, 0xe8, 0x33, 0x51, 0x2d, 0x01, 0x40,\n\t0xab, 0x5c, 0x28, 0x64, 0x9e, 0x06, 0x08, 0x10, 0xda, 0xce, 0x7d, 0x08,\n\t0xac, 0x43, 0xe0, 0x62, 0xa1, 0xcc, 0x30, 0xac, 0xe9, 0x23, 0x88, 0x50,\n\t0x47, 0x56, 0xf7, 0xdf, 0x73, 0xec, 0x17, 0xaa, 0xa5, 0xc2, 0x99, 0x9f,\n\t0xbe, 0xf8, 0x46, 0xee, 0xdd, 0x7f, 0x60, 0x0c, 0x05, 0x20, 0x25, 0xaa,\n\t0x55, 0x8a, 0xc7, 0x1c, 0xdb, 0xca, 0x34, 0x0d, 0x40, 0x9a, 0x14, 0xd4,\n\t0x74, 0x3e, 0x02, 0x3e, 0x3d, 0x98, 0xc9, 0x4a, 0x6c, 0x3c, 0xac, 0x9a,\n\t0xa3, 0xd9, 0xcf, 0xff, 0x13, 0x62, 0xa1, 0x54, 0xf0, 0x9e, 0x07, 
0xb0,\n\t0x7a, 0xff, 0xd1, 0xf7, 0x8c, 0xba, 0xf9, 0xd9, 0xda, 0x3c, 0xea, 0x06,\n\t0x0c, 0x43, 0x5f, 0x4d, 0x17, 0x57, 0x5b, 0xe4, 0xda, 0xd6, 0x5c, 0xc1,\n\t0x73, 0x7e, 0x8a, 0xac, 0xa7, 0x06, 0x41, 0x61, 0xd5, 0xfb, 0x14, 0x62,\n\t0x39, 0x01, 0x0e, 0x06, 0x32, 0xd7, 0xc6, 0xa0, 0xa5, 0x6f, 0x44, 0x17,\n\t0x07, 0xb2, 0xc6, 0x8e, 0xb4, 0x65, 0xa9, 0xb7, 0x6b, 0xe5, 0xf1, 0x71,\n\t0xff, 0x81, 0x31, 0x13, 0x80, 0x3e, 0x53, 0xf5, 0x0a, 0x00, 0x34, 0x2b,\n\t0x45, 0xef, 0xef, 0x95, 0x94, 0x6f, 0x22, 0xe3, 0xe1, 0xa1, 0x3d, 0xeb,\n\t0x56, 0xb4, 0xed, 0xf7, 0x71, 0x30, 0x90, 0xb9, 0x26, 0x44, 0x0a, 0x2b,\n\t0xe6, 0x4e, 0x98, 0xec, 0x9b, 0xc9, 0xba, 0x9e, 0xe3, 0xbc, 0x50, 0x2e,\n\t0x7a, 0x67, 0x9e, 0x79, 0x35, 0xff, 0xd1, 0xff, 0x3e, 0x63, 0x27, 0x00,\n\t0xfd, 0x4e, 0x15, 0x37, 0x3c, 0xa8, 0xeb, 0x95, 0xd2, 0x09, 0x37, 0x39,\n\t0x71, 0xe5, 0x9a, 0x7b, 0x03, 0x92, 0x60, 0x60, 0x09, 0xab, 0xde, 0x27,\n\t0x61, 0x84, 0x37, 0xea, 0xcb, 0x60, 0x72, 0x0d, 0xa1, 0xab, 0x67, 0xd0,\n\t0xa6, 0x9b, 0xb3, 0x06, 0xff, 0x48, 0x4a, 0x79, 0xa1, 0x52, 0x4a, 0xdc,\n\t0xff, 0x7b, 0x6f, 0xcf, 0x7f, 0xf4, 0xbf, 0xcf, 0xd8, 0x09, 0x40, 0x9f,\n\t0xf8, 0xf8, 0xe3, 0xb0, 0x94, 0x5a, 0xac, 0x14, 0x0b, 0xcf, 0x66, 0xdd,\n\t0x1b, 0x20, 0x60, 0xd0, 0x72, 0xee, 0x43, 0xcf, 0xba, 0x89, 0xbd, 0x00,\n\t0xe6, 0xaa, 0xac, 0x9a, 0xdb, 0x10, 0xa2, 0x86, 0x8c, 0xd3, 0xc5, 0xd0,\n\t0xb5, 0xad, 0x5f, 0x34, 0x2a, 0xa5, 0x9f, 0x63, 0x8c, 0xdc, 0x7f, 0x60,\n\t0x8c, 0x05, 0x40, 0xc9, 0xa4, 0x46, 0x40, 0xbd, 0x52, 0x7a, 0xde, 0xb6,\n\t0xd4, 0x31, 0x64, 0x48, 0x0d, 0x4e, 0x0a, 0x85, 0xcc, 0x60, 0xc5, 0xfd,\n\t0x2c, 0x6f, 0x10, 0x62, 0xde, 0x05, 0x42, 0xa0, 0x6b, 0x58, 0x35, 0x47,\n\t0x33, 0x87, 0x8b, 0x85, 0x10, 0x6b, 0xa5, 0x82, 0xf7, 0x8c, 0xeb, 0xd8,\n\t0xa7, 0xa3, 0x58, 0x8f, 0x8d, 0xfb, 0x0f, 0x8c, 0xb1, 0x00, 0xa4, 0xe8,\n\t0x72, 0xd1, 0x3b, 0x56, 0xf4, 0xdc, 0xc7, 0x90, 0x31, 0x18, 0x08, 0x08,\n\t0xac, 0x78, 0x0f, 0xa1, 0x67, 0x1d, 0x06, 0x2f, 0x09, 0x32, 0x57, 0x62,\n\t0x45, 0xdf, 0x01, 0x1f, 0x7b, 0x90, 0x71, 0xf4, 0xd7, 0x96, 0x92, 0x6f,\n\t0xd6, 0x2b, 0xc5, 0x5f, 0x00, 0xe8, 0xda, 0xd6, 0x78, 0x15, 0xa2, 0x1d,\n\t0x4b, 0x01, 0x58, 0xa7, 0xae, 0xab, 0xb5, 0x72, 0xe1, 0x27, 0x4a, 0xc9,\n\t0x37, 0x90, 0xc9, 0xa2, 0x0d, 0x42, 0xb5, 0x1f, 0x2b, 0xde, 0x67, 0x01,\n\t0xae, 0x1a, 0xcc, 0x5c, 0x46, 0x52, 0xf4, 0x73, 0xc9, 0x7c, 0x60, 0x98,\n\t0x8a, 0xd2, 0xcd, 0xa2, 0xe7, 0xfe, 0xa8, 0x56, 0x2e, 0xbe, 0x8c, 0x4c,\n\t0x5e, 0x68, 0xbe, 0x18, 0x4b, 0x01, 0x18, 0xe4, 0xdb, 0x3f, 0x7e, 0x5e,\n\t0x4f, 0x54, 0x2b, 0xc7, 0x0b, 0x8e, 0xf3, 0x12, 0x32, 0x17, 0x5f, 0x90,\n\t0x58, 0xf1, 0x3e, 0x0b, 0xdf, 0xba, 0x01, 0xec, 0x05, 0x30, 0x83, 0xac,\n\t0xe8, 0x3b, 0xe1, 0x63, 0x6f, 0xd6, 0x18, 0x11, 0x29, 0x29, 0x8f, 0x57,\n\t0x4b, 0x85, 0x1f, 0x01, 0x58, 0xea, 0xff, 0x70, 0x5c, 0xdc, 0x7f, 0x60,\n\t0x8c, 0x05, 0xa0, 0xdf, 0xc9, 0xbf, 0xfc, 0xc0, 0x5d, 0x90, 0x52, 0x2c,\n\t0x56, 0x4a, 0x85, 0x1f, 0x4b, 0x21, 0x4e, 0x23, 0x93, 0xdf, 0x66, 0x10,\n\t0xa8, 0x83, 0x58, 0x75, 0x1f, 0x1a, 0xe7, 0x2e, 0x60, 0x36, 0x15, 0x42,\n\t0x60, 0xaa, 0x58, 0x36, 0xef, 0x1f, 0x66, 0xf4, 0x0f, 0x3c, 0xd7, 0x7e,\n\t0x75, 0xa2, 0x5a, 0xce, 0x98, 0x90, 0x96, 0x3f, 0xc6, 0xfe, 0xe9, 0x97,\n\t0x89, 0x10, 0xf8, 0x13, 0xb5, 0xf2, 0x53, 0x8e, 0x6d, 0xfd, 0x14, 0x99,\n\t0xbd, 0x00, 0x85, 0x65, 0xef, 0x73, 0x08, 0xd4, 0x41, 0xb0, 0x17, 0xc0,\n\t0x00, 0xc0, 0x4a, 0x7c, 0x07, 0x7a, 0xd8, 0x97, 0x79, 0x85, 0x48, 0x0a,\n\t0x71, 0xae, 0x5a, 0x2a, 0xfc, 0xd8, 0xb2, 0xd4, 0x85, 0x28, 0xd6, 0x00,\n\t0xc6, 0x6b, 0xf4, 0x07, 0xc6, 0x5c, 0x00, 0xfa, 0x9d, 0x6d, 0x88, 0xc8,\n\t0x73, 0xec, 0xb9, 
0x6a, 0xa9, 0xf0, 0x53, 0x21, 0x44, 0xc6, 0x60, 0xa0,\n\t0x81, 0x6f, 0x1d, 0xc6, 0x52, 0xe1, 0x97, 0xc1, 0xb1, 0x80, 0xdd, 0x0e,\n\t0xc1, 0xd7, 0x35, 0x2c, 0x99, 0x7b, 0x31, 0x84, 0x49, 0xf8, 0xae, 0x63,\n\t0x3f, 0x3d, 0x59, 0xab, 0x3c, 0x89, 0x31, 0x0c, 0xfe, 0xf5, 0x19, 0x6b,\n\t0x01, 0xb8, 0x78, 0x11, 0xa9, 0x17, 0x30, 0x59, 0xaf, 0xbc, 0xe8, 0xda,\n\t0xd6, 0xeb, 0xc8, 0x1c, 0x8c, 0x51, 0x58, 0xf6, 0x7e, 0x09, 0x5d, 0x9b,\n\t0x0b, 0x87, 0xee, 0x66, 0x88, 0x80, 0x25, 0xfd, 0x7e, 0xf8, 0xd8, 0x87,\n\t0xac, 0x69, 0xe2, 0xe9, 0xe8, 0xff, 0xa8, 0xeb, 0xd8, 0x27, 0xfd, 0x30,\n\t0x32, 0xc0, 0xf8, 0x8d, 0xfe, 0xc0, 0x0e, 0x11, 0x80, 0x14, 0x53, 0xf4,\n\t0xdc, 0xd7, 0x2b, 0xa5, 0xc2, 0x13, 0xc3, 0x78, 0x01, 0xa1, 0x9a, 0xc5,\n\t0x62, 0xe1, 0x8b, 0x30, 0xc2, 0x01, 0xef, 0x11, 0xd8, 0x8d, 0x10, 0x7a,\n\t0x7a, 0x1a, 0xcb, 0xe6, 0x03, 0x18, 0x62, 0xa3, 0x98, 0xef, 0xd8, 0xd6,\n\t0xd3, 0x13, 0xe9, 0xe8, 0x9f, 0xf7, 0xb2, 0x5f, 0x57, 0x63, 0xec, 0x05,\n\t0x60, 0x9d, 0xea, 0xae, 0x4c, 0xd6, 0x2a, 0x4f, 0xb8, 0xb6, 0x35, 0xd4,\n\t0x92, 0xcc, 0xaa, 0xfb, 0xa9, 0x74, 0x8f, 0x00, 0x0b, 0xc0, 0x6e, 0x83,\n\t0x48, 0x62, 0x51, 0xdf, 0x87, 0x10, 0xd9, 0x37, 0x89, 0x09, 0x21, 0xce,\n\t0x55, 0x4a, 0x85, 0x47, 0x8b, 0x9e, 0x73, 0xd2, 0x0f, 0xc6, 0x77, 0xf4,\n\t0x07, 0x76, 0x80, 0x00, 0xac, 0x43, 0x97, 0x0a, 0xee, 0x2b, 0xd5, 0x52,\n\t0xe1, 0xf1, 0xec, 0x5e, 0x00, 0x21, 0x96, 0x93, 0x58, 0x28, 0x7e, 0x09,\n\t0xb1, 0x28, 0x83, 0xbd, 0x80, 0xdd, 0x84, 0x41, 0x5b, 0xef, 0xc7, 0x0a,\n\t0xdd, 0x3d, 0xcc, 0x9b, 0x7c, 0xd7, 0xb6, 0x9e, 0x9e, 0xaa, 0xa7, 0xa3,\n\t0xbf, 0x3b, 0xbe, 0xa3, 0x3f, 0xb0, 0x43, 0x04, 0x60, 0x7d, 0x62, 0xd0,\n\t0x44, 0xad, 0xf2, 0xa8, 0x3b, 0xd4, 0x8a, 0x00, 0x61, 0xcd, 0xf9, 0x08,\n\t0x9a, 0xee, 0x27, 0xc0, 0x02, 0xb0, 0x7b, 0xd0, 0xc6, 0xc1, 0x7c, 0x7c,\n\t0x3f, 0x22, 0x54, 0x90, 0xf1, 0xbe, 0x93, 0x48, 0xe7, 0xfe, 0x45, 0xcf,\n\t0x3d, 0x19, 0xc5, 0xf1, 0x58, 0x8f, 0xfe, 0xc0, 0x0e, 0x11, 0x80, 0x75,\n\t0xe8, 0x52, 0xc1, 0x7d, 0xb1, 0x5a, 0x2a, 0xfc, 0x8d, 0x10, 0xe2, 0x24,\n\t0x32, 0xdd, 0x59, 0x82, 0x11, 0x45, 0x5c, 0x28, 0xfd, 0x0b, 0x04, 0x8a,\n\t0x93, 0x83, 0x76, 0x07, 0x06, 0x2b, 0xf1, 0xed, 0x58, 0xc3, 0x1d, 0xc3,\n\t0x94, 0x88, 0x09, 0x3c, 0xc7, 0x7e, 0x7a, 0xaa, 0x51, 0x7d, 0x12, 0x40,\n\t0xc7, 0xb6, 0xc6, 0x7f, 0x3f, 0xc9, 0x8e, 0x11, 0x00, 0x21, 0xc4, 0xa0,\n\t0x12, 0x77, 0xa6, 0x1b, 0xd5, 0x7f, 0xf0, 0x1c, 0x7b, 0x08, 0x2f, 0xc0,\n\t0xa0, 0x67, 0xdd, 0x8a, 0xf9, 0xe2, 0x6f, 0x80, 0x84, 0x0b, 0xf6, 0x04,\n\t0x76, 0x32, 0x04, 0x5f, 0x4f, 0xe0, 0x82, 0x79, 0x10, 0x1a, 0x1e, 0x32,\n\t0x47, 0xfe, 0xa5, 0x38, 0x57, 0xaf, 0x94, 0x1e, 0x2d, 0xb8, 0xce, 0x49,\n\t0x43, 0x34, 0x56, 0x9b, 0x7e, 0xde, 0xf5, 0x9a, 0x46, 0xdd, 0x80, 0x2d,\n\t0x82, 0x3c, 0xd7, 0x39, 0x5b, 0xaf, 0x94, 0x7e, 0x20, 0xa5, 0x38, 0x85,\n\t0xcc, 0xd6, 0x2c, 0xb0, 0x5c, 0xf8, 0x25, 0x34, 0x9d, 0x8f, 0x66, 0x7f,\n\t0x0b, 0x33, 0x76, 0x18, 0x52, 0x38, 0x1f, 0x7f, 0x0c, 0xfe, 0x10, 0x49,\n\t0x3f, 0x00, 0xba, 0x05, 0xd7, 0xfd, 0xd9, 0x74, 0xa3, 0xf2, 0x24, 0x80,\n\t0x8e, 0x1c, 0x73, 0xc3, 0xef, 0xb3, 0xe3, 0x04, 0x60, 0x40, 0x91, 0xfd,\n\t0x99, 0x89, 0xea, 0x53, 0x45, 0xcf, 0x7d, 0x12, 0x99, 0xaa, 0x07, 0x03,\n\t0x49, 0xd1, 0x90, 0x1a, 0x2e, 0x94, 0xfe, 0x0d, 0x42, 0xb5, 0x0f, 0x3c,\n\t0x15, 0xd8, 0x89, 0x18, 0xac, 0xc6, 0xb7, 0x60, 0x85, 0xee, 0x19, 0xe6,\n\t0x4d, 0x5a, 0x49, 0xf9, 0xca, 0x44, 0xb5, 0xf4, 0x35, 0xdb, 0xb2, 0x8e,\n\t0x23, 0x1d, 0x1d, 0xc6, 0x7d, 0xf4, 0x07, 0x76, 0xa0, 0x00, 0x0c, 0x40,\n\t0x96, 0x52, 0x67, 0x26, 0xaa, 0xe5, 0xaf, 0x2b, 0x25, 0x9f, 0x47, 0xe6,\n\t0x65, 0x41, 0x83, 0x8e, 0xfd, 0x5e, 0x2c, 0x16, 
0xbe, 0x04, 0xe2, 0xd3,\n\t0x84, 0x76, 0x18, 0xc9, 0x5e, 0xff, 0xf3, 0xfa, 0x13, 0x99, 0x8f, 0xf9,\n\t0x4e, 0x69, 0x96, 0x0a, 0xee, 0xa3, 0x33, 0x13, 0xb5, 0xbf, 0x03, 0xd0,\n\t0x1d, 0xf5, 0x55, 0x6c, 0x26, 0x3b, 0x52, 0x00, 0xfa, 0xca, 0xfc, 0xad,\n\t0x27, 0x9e, 0xf6, 0xa7, 0x1b, 0xd5, 0x9f, 0x55, 0x8a, 0x85, 0x47, 0x90,\n\t0xb9, 0x5e, 0x40, 0xd2, 0x2d, 0x0b, 0x85, 0x5f, 0x43, 0xd3, 0x7d, 0x10,\n\t0xec, 0x05, 0xec, 0x1c, 0x0c, 0x59, 0x38, 0x1f, 0x7f, 0x1c, 0xbd, 0xec,\n\t0x85, 0x3e, 0x81, 0xe4, 0xa4, 0xdf, 0xe7, 0xa6, 0xea, 0xd5, 0x1f, 0x00,\n\t0x58, 0xec, 0xff, 0x70, 0x27, 0x8c, 0xfe, 0xc0, 0x0e, 0x15, 0x80, 0x3e,\n\t0xbf, 0xfa, 0x89, 0xfb, 0x00, 0x60, 0x65, 0xba, 0x51, 0xfd, 0xa1, 0x6b,\n\t0x5b, 0x4f, 0x23, 0x53, 0xed, 0x40, 0x00, 0x20, 0x68, 0x59, 0xc7, 0xb9,\n\t0xf2, 0x6f, 0xa3, 0x67, 0xbd, 0x87, 0xd3, 0x84, 0x77, 0x02, 0x64, 0xb0,\n\t0x14, 0xbd, 0x17, 0xcb, 0x34, 0x54, 0xc6, 0x1f, 0x09, 0x21, 0xce, 0x56,\n\t0x4b, 0xc5, 0xbf, 0x69, 0x54, 0x4b, 0xbf, 0xc0, 0x18, 0xee, 0xf7, 0xbf,\n\t0x16, 0x3b, 0x56, 0x00, 0x06, 0x4f, 0x14, 0xae, 0x96, 0x0a, 0x2f, 0x35,\n\t0xaa, 0xe5, 0xbf, 0x92, 0x52, 0xbc, 0x8d, 0xcc, 0x43, 0xba, 0x41, 0xcf,\n\t0xba, 0x05, 0xe7, 0x4b, 0xff, 0x0e, 0xb1, 0xa8, 0x82, 0x83, 0x82, 0xe3,\n\t0x8c, 0x41, 0x5b, 0x1f, 0xc0, 0x79, 0xf3, 0xe9, 0xf4, 0x74, 0xa8, 0xcc,\n\t0xf7, 0xd2, 0x2f, 0xb8, 0xce, 0xcf, 0xf6, 0x4e, 0xd5, 0x7f, 0x04, 0xa0,\n\t0x09, 0xbc, 0x63, 0xb5, 0x69, 0xec, 0xd9, 0xb1, 0x02, 0x00, 0x5c, 0x26,\n\t0x02, 0xed, 0x3d, 0x13, 0xb5, 0x1f, 0x15, 0x3d, 0xf7, 0xbb, 0x18, 0x28,\n\t0xdc, 0x70, 0x6d, 0x0c, 0x56, 0xbd, 0x4f, 0x61, 0xa1, 0xf8, 0xa5, 0x61,\n\t0xf6, 0x88, 0x33, 0xb9, 0x82, 0x10, 0xe9, 0x12, 0xe6, 0xe2, 0x87, 0x10,\n\t0x62, 0x12, 0x43, 0x18, 0xbf, 0x56, 0x4a, 0xbe, 0x32, 0x55, 0xaf, 0x7c,\n\t0xc3, 0x73, 0xec, 0x13, 0xc3, 0xbc, 0x71, 0x9c, 0xd8, 0xd1, 0x02, 0x30,\n\t0x00, 0x59, 0x96, 0x3a, 0x3b, 0xd3, 0xa8, 0x7e, 0xdb, 0xb6, 0xac, 0x17,\n\t0x30, 0x84, 0x2b, 0x47, 0xb0, 0x31, 0x5f, 0xfc, 0x4d, 0xb4, 0x9c, 0x8f,\n\t0xf0, 0x54, 0x60, 0x0c, 0x31, 0x24, 0x71, 0x2e, 0x7e, 0x00, 0x2d, 0xdc,\n\t0x82, 0x21, 0x6c, 0x98, 0x84, 0xc0, 0x7c, 0xb5, 0x54, 0xfc, 0xde, 0x74,\n\t0xa3, 0xfa, 0x63, 0xa4, 0x81, 0xbf, 0x9d, 0x34, 0xf2, 0xf7, 0xd9, 0xf1,\n\t0x02, 0xd0, 0xbf, 0x69, 0xdd, 0x5e, 0x10, 0x37, 0xaa, 0xe5, 0x57, 0x27,\n\t0xaa, 0xa5, 0x47, 0xa4, 0x10, 0x73, 0xc8, 0xfc, 0x34, 0x10, 0x62, 0x39,\n\t0x81, 0xb3, 0x95, 0x2f, 0xa3, 0x6b, 0xdd, 0x0a, 0x0e, 0x0a, 0x8e, 0x11,\n\t0x44, 0x58, 0x8a, 0xee, 0xc2, 0x12, 0x7d, 0x08, 0x43, 0x1e, 0x09, 0xe7,\n\t0x17, 0x5c, 0xe7, 0x27, 0x7b, 0x27, 0xeb, 0xdf, 0x04, 0xb0, 0x30, 0xea,\n\t0xcb, 0xd8, 0x4a, 0x76, 0xbc, 0x00, 0xf4, 0x29, 0x16, 0x5c, 0x00, 0x58,\n\t0xdd, 0x33, 0x59, 0xff, 0x5e, 0xa9, 0xe0, 0x3e, 0x06, 0xa0, 0x93, 0xfd,\n\t0xdd, 0x06, 0x5d, 0xeb, 0x76, 0x9c, 0xad, 0xfc, 0x27, 0x44, 0x72, 0x0f,\n\t0x58, 0x04, 0xc6, 0x01, 0x83, 0xd5, 0xf8, 0x08, 0xe6, 0xcc, 0xe7, 0x60,\n\t0x86, 0xc8, 0xf6, 0x03, 0x10, 0x5a, 0x4a, 0x3d, 0x3b, 0xdd, 0xa8, 0xfd,\n\t0x75, 0xd1, 0x73, 0x8e, 0x01, 0xd0, 0xc0, 0xce, 0x1c, 0xfd, 0x81, 0x5d,\n\t0x22, 0x00, 0x03, 0x37, 0xcf, 0xd8, 0x96, 0x7a, 0x7b, 0x7a, 0xa2, 0xf6,\n\t0x35, 0xc7, 0xb6, 0x9e, 0x42, 0xe6, 0x55, 0x81, 0xe4, 0x50, 0x91, 0x35,\n\t0xe7, 0xc3, 0x38, 0x57, 0xfe, 0x2d, 0x18, 0x51, 0xc2, 0x0e, 0x9d, 0x12,\n\t0xee, 0x10, 0x0c, 0x3a, 0xf1, 0x2c, 0xce, 0xe8, 0x5f, 0x41, 0x94, 0xfd,\n\t0x70, 0x0f, 0x20, 0x89, 0xfa, 0x9f, 0xaa, 0x95, 0x8b, 0x7f, 0x3a, 0x55,\n\t0xaf, 0x3c, 0x86, 0x1d, 0xec, 0xfa, 0xf7, 0xd9, 0x15, 0x02, 0xb0, 0x0e,\n\t0xbf, 0x51, 0x29, 0xfd, 0x64, 0xa2, 0x56, 0xf9, 0x13, 0x25, 0xe5, 
0x5b,\n\t0x18, 0x6a, 0x38, 0x17, 0x58, 0xf2, 0xfe, 0x11, 0xe6, 0x8b, 0xff, 0x94,\n\t0x0f, 0x16, 0xc9, 0x2d, 0x84, 0x40, 0xd7, 0x71, 0x26, 0xfe, 0x3c, 0x02,\n\t0xcc, 0x0c, 0x5b, 0xe3, 0x61, 0xb5, 0xe8, 0x39, 0x8f, 0xec, 0x9b, 0xaa,\n\t0xff, 0x00, 0xc0, 0x0a, 0xb0, 0xb3, 0x8d, 0x1f, 0xd8, 0x45, 0x02, 0xb0,\n\t0x6e, 0xf9, 0x66, 0x6d, 0xef, 0x64, 0xfd, 0x89, 0x4a, 0xa9, 0xf0, 0x1d,\n\t0x01, 0x9c, 0xc3, 0x10, 0xf1, 0x00, 0x12, 0x0e, 0x2e, 0x14, 0xff, 0x35,\n\t0x56, 0xbc, 0xcf, 0x64, 0x7f, 0x1b, 0xb3, 0x4d, 0x10, 0x62, 0xe3, 0xe0,\n\t0x6c, 0xf4, 0x10, 0xda, 0xb8, 0x19, 0x43, 0xde, 0x9f, 0xc0, 0xb1, 0xac,\n\t0x5f, 0x4c, 0x37, 0xaa, 0x5f, 0x73, 0x1d, 0x3b, 0xe3, 0x2e, 0xd2, 0xf1,\n\t0x67, 0xd7, 0x08, 0xc0, 0x3a, 0x48, 0x49, 0x71, 0x76, 0xdf, 0x54, 0xfd,\n\t0xab, 0x05, 0xcf, 0x7d, 0x04, 0x99, 0xf7, 0x0a, 0x00, 0xfd, 0x24, 0xa1,\n\t0xb9, 0xf2, 0x7f, 0x40, 0xd3, 0x79, 0x00, 0xbb, 0xe4, 0x39, 0x19, 0x03,\n\t0x08, 0xda, 0x58, 0x38, 0x1b, 0x7d, 0x06, 0x2b, 0x78, 0x3f, 0x86, 0xbc,\n\t0x2f, 0x5a, 0x49, 0xf9, 0xd2, 0x44, 0xad, 0xfc, 0x27, 0x93, 0xb5, 0xca,\n\t0xb3, 0x48, 0x4b, 0x7c, 0xef, 0xf4, 0xd1, 0x1f, 0xd8, 0x85, 0x02, 0x30,\n\t0x70, 0x53, 0xe3, 0xa2, 0xe7, 0xbe, 0x31, 0x33, 0x51, 0xfb, 0xba, 0x63,\n\t0x59, 0xcf, 0x60, 0x88, 0x78, 0x40, 0xff, 0x74, 0xa1, 0x33, 0xd5, 0xff,\n\t0x82, 0x96, 0x73, 0x3f, 0x58, 0x04, 0x46, 0x4d, 0xdf, 0xf8, 0x3f, 0x8d,\n\t0x45, 0xba, 0x7f, 0xe8, 0x37, 0x0b, 0x21, 0x2e, 0xd4, 0x2a, 0xc5, 0x6f,\n\t0xcf, 0xce, 0x4c, 0xfc, 0x2d, 0x80, 0xd5, 0x51, 0x5f, 0xcd, 0x76, 0xb2,\n\t0xeb, 0x04, 0x60, 0x1d, 0xc1, 0x64, 0xad, 0xfc, 0xf7, 0x13, 0xb5, 0xf2,\n\t0x1f, 0x2b, 0x29, 0xdf, 0xc4, 0x50, 0xf1, 0x00, 0x83, 0x40, 0x1d, 0xc2,\n\t0xe9, 0xca, 0xef, 0xa0, 0xe5, 0xdc, 0x3b, 0xdc, 0x5b, 0x99, 0x4d, 0x84,\n\t0x60, 0x8c, 0x85, 0x73, 0xd1, 0x83, 0x58, 0xa4, 0x8f, 0x62, 0xc8, 0x47,\n\t0x9a, 0x00, 0xcc, 0x97, 0x0b, 0xee, 0xb7, 0x66, 0xa7, 0x1a, 0xdf, 0x14,\n\t0x03, 0x4b, 0x7e, 0xbb, 0x61, 0xf4, 0x07, 0x76, 0xa9, 0x00, 0xac, 0x8b,\n\t0x07, 0x34, 0x67, 0xa7, 0x1b, 0x8f, 0xd7, 0x2b, 0xa5, 0xef, 0x0a, 0x21,\n\t0xce, 0x63, 0xa8, 0xe1, 0xdc, 0xc0, 0xb7, 0x8e, 0xe0, 0x4c, 0xe5, 0x77,\n\t0xd0, 0xb1, 0xef, 0x01, 0x8b, 0xc0, 0x76, 0x43, 0x30, 0xa4, 0x70, 0x2e,\n\t0xfe, 0x18, 0xe6, 0xe9, 0xe3, 0x1b, 0x09, 0xcc, 0xae, 0x78, 0x8e, 0xfd,\n\t0xad, 0xbd, 0x53, 0xf5, 0xff, 0xe3, 0x3a, 0xf6, 0xab, 0xd8, 0xe1, 0x4b,\n\t0x7e, 0x57, 0x62, 0x57, 0x0a, 0xc0, 0x3a, 0x48, 0x08, 0x31, 0xb7, 0x6f,\n\t0xba, 0xfe, 0xa7, 0x95, 0xa2, 0xf7, 0x97, 0x18, 0x2a, 0x28, 0x08, 0xf4,\n\t0xf7, 0x0c, 0x9c, 0xae, 0xfe, 0x57, 0x74, 0xec, 0xbb, 0xc0, 0x22, 0xb0,\n\t0x5d, 0x24, 0xc6, 0x7f, 0x21, 0xfc, 0x30, 0xe6, 0xcd, 0x83, 0xe9, 0xd6,\n\t0xed, 0xa1, 0xa6, 0x62, 0xa1, 0x63, 0x5b, 0x3f, 0xdf, 0x3b, 0x59, 0xff,\n\t0xf3, 0x6a, 0xa9, 0xf8, 0x12, 0x76, 0xd1, 0xbc, 0x7f, 0x90, 0x5d, 0x2d,\n\t0x00, 0x83, 0xf1, 0x00, 0xd7, 0xb6, 0x8f, 0xed, 0x9d, 0x6a, 0x7c, 0xa5,\n\t0xe0, 0x3a, 0x7f, 0x8d, 0xa1, 0xb3, 0xbf, 0x92, 0x44, 0xa1, 0x53, 0xd5,\n\t0xff, 0x86, 0x36, 0x4f, 0x07, 0xb6, 0x81, 0x64, 0xce, 0x7f, 0x2e, 0x7c,\n\t0x00, 0xe7, 0x69, 0xe8, 0x0d, 0x3e, 0x00, 0x10, 0x5a, 0x4a, 0x3e, 0x3b,\n\t0x55, 0xaf, 0x7e, 0x75, 0xb2, 0x5e, 0x79, 0xee, 0x89, 0xe7, 0xdf, 0xdc,\n\t0x95, 0xc6, 0x0f, 0xec, 0x72, 0x01, 0x00, 0x2e, 0xdd, 0x74, 0x22, 0xc4,\n\t0x95, 0xa2, 0x77, 0x6c, 0xdf, 0x74, 0xe3, 0xcf, 0x3c, 0xc7, 0xfe, 0x07,\n\t0x0c, 0x15, 0x14, 0x04, 0x00, 0x42, 0xcf, 0xba, 0x15, 0xa7, 0x2a, 0xbf,\n\t0x87, 0x96, 0xf3, 0x61, 0x70, 0x60, 0x70, 0xab, 0x20, 0x68, 0x63, 0x63,\n\t0x2e, 0xfe, 0x24, 0x2e, 0xd0, 0x27, 0x37, 0x64, 0xfc, 0x52, 0x8a, 0xe7,\n\t0x1a, 0xd5, 0xf2, 
0x1f, 0xec, 0x9b, 0xaa, 0x7f, 0x17, 0x40, 0xf3, 0x13,\n\t0x77, 0xdf, 0x3c, 0xea, 0x8b, 0x1a, 0x19, 0xbb, 0x5e, 0x00, 0x80, 0x7e,\n\t0x4c, 0x00, 0xf0, 0xc3, 0x28, 0x6e, 0x54, 0x4a, 0xaf, 0xec, 0x99, 0xac,\n\t0xff, 0x99, 0x93, 0xd4, 0x0f, 0x08, 0x86, 0xfb, 0x24, 0x03, 0xdf, 0xba,\n\t0x09, 0xa7, 0xaa, 0xbf, 0x87, 0xa6, 0xfb, 0xf1, 0x51, 0x5f, 0xd6, 0x0e,\n\t0x84, 0x10, 0x1b, 0x1b, 0x67, 0xa3, 0xcf, 0x60, 0xc1, 0x7c, 0x7c, 0x23,\n\t0x6e, 0x3f, 0x09, 0x21, 0xde, 0xae, 0x95, 0x8b, 0xff, 0xf7, 0xe0, 0x9e,\n\t0xc9, 0x6f, 0x23, 0x29, 0x12, 0x43, 0x3b, 0x6d, 0x8b, 0xef, 0x30, 0x70,\n\t0x3a, 0xdb, 0x00, 0xe9, 0x11, 0x4f, 0x9d, 0xa9, 0x7a, 0xe5, 0x6f, 0xa3,\n\t0x58, 0xab, 0xf9, 0xe5, 0x26, 0x62, 0xad, 0x3f, 0x08, 0xc0, 0xcd, 0xfe,\n\t0x29, 0xfd, 0xd5, 0x81, 0xdf, 0x85, 0x11, 0x1e, 0x1a, 0xfe, 0xa3, 0x48,\n\t0xa6, 0x04, 0xbb, 0xf3, 0x01, 0xdb, 0x3c, 0x08, 0xb1, 0x71, 0x71, 0x26,\n\t0xfc, 0x2c, 0x96, 0xf0, 0x21, 0x24, 0x63, 0xd7, 0xd0, 0xc6, 0xbf, 0x50,\n\t0x2b, 0x17, 0xbf, 0x7f, 0x60, 0xcf, 0xe4, 0xdf, 0xa6, 0x07, 0xc7, 0xd0,\n\t0x6e, 0x35, 0xfc, 0x3e, 0x2c, 0x00, 0x29, 0x97, 0xa6, 0x02, 0x44, 0x00,\n\t0x56, 0xf7, 0x4d, 0xd5, 0x1f, 0xd6, 0x46, 0x63, 0x71, 0xb5, 0x25, 0xb4,\n\t0x36, 0xf7, 0x61, 0x48, 0x11, 0x08, 0xd5, 0x2c, 0x4e, 0x57, 0x7e, 0x07,\n\t0xb1, 0x9c, 0xc0, 0x64, 0xef, 0x9b, 0x90, 0xe4, 0x83, 0x45, 0x60, 0xa3,\n\t0x18, 0xf8, 0x7a, 0x02, 0x67, 0xa3, 0x4f, 0x63, 0x15, 0xef, 0x4b, 0x7f,\n\t0x36, 0xb4, 0xf1, 0x2f, 0x56, 0x4b, 0x85, 0x6f, 0xce, 0x4e, 0x37, 0xfe,\n\t0x9f, 0x63, 0x59, 0x67, 0x86, 0xfd, 0x80, 0x9d, 0x0a, 0x0b, 0xc0, 0x3a,\n\t0x84, 0x10, 0x48, 0x4b, 0xbe, 0xaf, 0x1e, 0x98, 0x99, 0x7c, 0x98, 0x08,\n\t0x58, 0x5a, 0x6d, 0x7d, 0x59, 0x9b, 0xe1, 0x45, 0x20, 0x96, 0x53, 0x38,\n\t0x5b, 0xfe, 0x8f, 0x08, 0xd5, 0x2c, 0xf6, 0x74, 0xfe, 0x08, 0x96, 0x59,\n\t0x02, 0xcf, 0xba, 0x86, 0xc5, 0xa0, 0x15, 0x1d, 0xc2, 0x59, 0xfd, 0x79,\n\t0x74, 0x70, 0x23, 0x36, 0x60, 0xb7, 0x24, 0x80, 0xc5, 0x72, 0xd1, 0xfb,\n\t0xc6, 0xfe, 0x99, 0x89, 0x3f, 0x28, 0xb8, 0xce, 0x8b, 0xd8, 0xa5, 0x11,\n\t0xff, 0x2b, 0xc1, 0x02, 0x70, 0x05, 0x06, 0x45, 0xe0, 0xe0, 0x9e, 0xc9,\n\t0x87, 0x89, 0x08, 0x4b, 0xcd, 0xf6, 0x97, 0xcd, 0x06, 0x44, 0xc0, 0x88,\n\t0x22, 0x2e, 0x14, 0xff, 0x39, 0x02, 0x35, 0x8b, 0xd9, 0xf6, 0xff, 0x86,\n\t0x17, 0xbf, 0x0d, 0x16, 0x81, 0x2c, 0x10, 0x88, 0x24, 0x96, 0xa3, 0x3b,\n\t0x31, 0x67, 0x3e, 0x8f, 0x00, 0x53, 0x1b, 0x29, 0xc8, 0x42, 0x02, 0x58,\n\t0x2c, 0x15, 0xbd, 0x6f, 0xec, 0x9f, 0x66, 0xe3, 0xbf, 0x12, 0x2c, 0x00,\n\t0xef, 0xc2, 0xa0, 0x08, 0x1c, 0xda, 0x3b, 0xf5, 0x30, 0x80, 0x0d, 0x8a,\n\t0x00, 0x01, 0x90, 0x58, 0x75, 0x3f, 0x8d, 0x50, 0xee, 0xc5, 0xfe, 0xf6,\n\t0xff, 0x42, 0x25, 0x7c, 0xaa, 0xff, 0x5b, 0x46, 0x7d, 0x99, 0x39, 0x25,\n\t0x89, 0xf4, 0x5f, 0x88, 0x3e, 0x82, 0x79, 0x7a, 0x00, 0x1a, 0xc5, 0xeb,\n\t0x32, 0xfe, 0x03, 0x33, 0x93, 0x7f, 0x50, 0x2a, 0xb8, 0x6c, 0xfc, 0x57,\n\t0x80, 0x7b, 0xe2, 0x1a, 0xa4, 0x22, 0x00, 0x00, 0xf5, 0x13, 0xe7, 0x16,\n\t0x3e, 0xbf, 0xb2, 0xd6, 0xfe, 0xb2, 0x31, 0x34, 0xa4, 0x08, 0xf4, 0x91,\n\t0x70, 0xf4, 0x39, 0xec, 0xed, 0x7c, 0x05, 0x13, 0xfe, 0xf7, 0x20, 0xa9,\n\t0x8b, 0xd1, 0x7b, 0x03, 0x04, 0x14, 0xf7, 0x42, 0x4c, 0xdd, 0x0d, 0x88,\n\t0x3c, 0xd4, 0x3d, 0x34, 0xf0, 0x75, 0x03, 0x73, 0xd1, 0x27, 0xb1, 0x8a,\n\t0xf7, 0xa5, 0xd9, 0x7d, 0x1b, 0x73, 0xfb, 0x4b, 0x05, 0xef, 0x1b, 0x07,\n\t0xf6, 0xb0, 0xf1, 0x5f, 0x0d, 0xee, 0x8d, 0x6b, 0x30, 0x20, 0x00, 0x20,\n\t0xa2, 0xfa, 0xe9, 0x0b, 0x4b, 0x9f, 0x5f, 0x5e, 0x6b, 0x7f, 0x79, 0xf8,\n\t0xc0, 0x60, 0x1f, 0x09, 0x49, 0x3d, 0x4c, 0xf8, 0xdf, 0xc3, 0xde, 0xce,\n\t0x57, 0xe0, 0xe8, 0x39, 0x8c, 0x56, 0x04, 0xf2, 
0x23, 0x00, 0x44, 0x84,\n\t0xb5, 0xf8, 0x08, 0xce, 0xea, 0xcf, 0xa1, 0x87, 0x83, 0xd8, 0x60, 0x9c,\n\t0x8e, 0x84, 0x10, 0x8b, 0x95, 0x74, 0xce, 0x5f, 0xf4, 0xd8, 0xf8, 0xaf,\n\t0x06, 0xf7, 0x48, 0x46, 0x06, 0x3d, 0x81, 0xb3, 0x0b, 0xcb, 0x9f, 0x5f,\n\t0x5c, 0x69, 0x7d, 0x39, 0xd6, 0x7a, 0x83, 0x22, 0x20, 0x00, 0x10, 0xca,\n\t0xd1, 0xf3, 0xd8, 0xd7, 0xfe, 0xfd, 0x74, 0x4a, 0x40, 0x18, 0xcd, 0xed,\n\t0xc8, 0x83, 0x00, 0x24, 0x2e, 0xff, 0x7c, 0x74, 0x1f, 0xe6, 0xe9, 0x41,\n\t0x44, 0xa8, 0x6e, 0xb4, 0x00, 0x6b, 0x24, 0x84, 0x38, 0x57, 0x2b, 0x17,\n\t0xbf, 0xb7, 0x7f, 0x66, 0xe2, 0x0f, 0x3d, 0xc7, 0x66, 0xe3, 0xbf, 0x06,\n\t0xdc, 0x2b, 0x43, 0x30, 0x28, 0x02, 0x17, 0x96, 0x9a, 0x9f, 0x9f, 0x5f,\n\t0x69, 0xfe, 0xfb, 0x30, 0x8a, 0xef, 0x05, 0xe0, 0x61, 0x03, 0x7d, 0x49,\n\t0x90, 0x70, 0xcc, 0x05, 0xec, 0xe9, 0xfc, 0x11, 0xa6, 0x7a, 0xdf, 0x82,\n\t0xa2, 0x0e, 0x68, 0xdb, 0xbd, 0x81, 0x51, 0x0b, 0x40, 0xb2, 0xc4, 0x37,\n\t0x17, 0x7d, 0x0a, 0xab, 0xb8, 0x27, 0x2d, 0xbf, 0xbe, 0xa1, 0x91, 0x3f,\n\t0x94, 0x42, 0x3c, 0x5f, 0xab, 0x14, 0xff, 0xfc, 0xc0, 0xcc, 0xe4, 0xf7,\n\t0x1d, 0xdb, 0x7a, 0x13, 0x6c, 0xfc, 0xd7, 0x84, 0x7b, 0x66, 0x48, 0x06,\n\t0x45, 0x60, 0x79, 0xad, 0xfd, 0xd0, 0xf9, 0xc5, 0xd5, 0xdf, 0xe8, 0x05,\n\t0xe1, 0xc7, 0x00, 0xcc, 0x60, 0x43, 0xfd, 0x29, 0x21, 0xc8, 0x47, 0x23,\n\t0x78, 0x0c, 0x33, 0xdd, 0x3f, 0x45, 0x31, 0x7a, 0x15, 0xdb, 0xeb, 0x0d,\n\t0x8c, 0x4a, 0x00, 0x0c, 0x8c, 0xb1, 0xb1, 0x1c, 0xdf, 0x86, 0x0b, 0xe6,\n\t0x13, 0xf0, 0x31, 0x8b, 0xeb, 0x58, 0x9a, 0x0f, 0x95, 0x94, 0xcf, 0xd5,\n\t0xab, 0xa5, 0xdf, 0x3f, 0x38, 0x33, 0xf9, 0x1d, 0xa5, 0xe4, 0x32, 0xd2,\n\t0x0d, 0x19, 0x6c, 0xfc, 0x57, 0x87, 0x7b, 0x67, 0x03, 0xf4, 0x8f, 0x86,\n\t0x07, 0x50, 0x6c, 0x75, 0x7a, 0xb7, 0x9d, 0x5d, 0x58, 0xf9, 0x97, 0x5d,\n\t0x3f, 0xf8, 0x75, 0x22, 0xda, 0x87, 0x0d, 0xf5, 0xa9, 0x00, 0x20, 0xe0,\n\t0xe8, 0x33, 0x98, 0xe9, 0xfe, 0x25, 0x26, 0xfc, 0xef, 0xc2, 0x36, 0xcb,\n\t0xdb, 0xe4, 0x0d, 0x6c, 0xb7, 0x00, 0x10, 0x40, 0x84, 0xae, 0xde, 0x83,\n\t0xf3, 0xf1, 0xc7, 0xd0, 0xc4, 0x51, 0x68, 0x78, 0xd7, 0x73, 0xe6, 0x42,\n\t0x60, 0x29, 0xf5, 0xfc, 0x54, 0xbd, 0xf2, 0xfb, 0xfb, 0x67, 0x26, 0xbe,\n\t0x8d, 0xf4, 0xe0, 0x17, 0x36, 0xfc, 0x6c, 0x70, 0x2f, 0x6d, 0x90, 0x01,\n\t0x4f, 0xc0, 0xea, 0x05, 0xe1, 0xad, 0x73, 0x0b, 0x2b, 0xff, 0x76, 0xad,\n\t0xdd, 0xfd, 0xa2, 0x21, 0xda, 0x0f, 0xc0, 0xd9, 0xd8, 0xa7, 0x4a, 0x08,\n\t0x44, 0xa8, 0x84, 0x4f, 0x63, 0x4f, 0xe7, 0x8f, 0x51, 0x0e, 0x9f, 0x81,\n\t0x40, 0x8c, 0xad, 0x0d, 0x12, 0x6e, 0xa7, 0x00, 0x18, 0xc4, 0xc6, 0xc3,\n\t0x52, 0x7c, 0x17, 0xe6, 0xcd, 0x03, 0x08, 0x31, 0x85, 0xeb, 0x18, 0xf5,\n\t0x09, 0xc0, 0x92, 0xeb, 0xd8, 0x3f, 0x9b, 0x69, 0x54, 0xff, 0x62, 0x66,\n\t0xa2, 0xf6, 0x30, 0x06, 0x72, 0xfb, 0x99, 0x6c, 0x70, 0x4f, 0x5d, 0x07,\n\t0x83, 0x22, 0x10, 0xc5, 0xfa, 0xc8, 0xd9, 0x85, 0xe5, 0xcf, 0xad, 0xac,\n\t0x75, 0xbe, 0x64, 0x8c, 0xe9, 0xc7, 0x05, 0x36, 0xf6, 0xb9, 0x90, 0xb0,\n\t0xcd, 0x22, 0x26, 0x7b, 0xdf, 0xc1, 0x54, 0xef, 0x6b, 0x70, 0xf5, 0x69,\n\t0xf4, 0xbd, 0x84, 0x2d, 0xb8, 0x8a, 0x6d, 0x10, 0x00, 0x02, 0x91, 0x40,\n\t0x3b, 0x3e, 0x88, 0xf3, 0xfa, 0x01, 0xb4, 0x70, 0xeb, 0x46, 0x97, 0xf7,\n\t0xfa, 0x18, 0x21, 0xb0, 0x50, 0x70, 0xdd, 0x6f, 0xee, 0x9b, 0xaa, 0xff,\n\t0x71, 0xbd, 0x52, 0x7a, 0x09, 0x49, 0x5d, 0x47, 0x36, 0xfe, 0x21, 0xe1,\n\t0xde, 0xba, 0x4e, 0x06, 0x44, 0x40, 0x12, 0x50, 0x9f, 0x9b, 0x5f, 0xfa,\n\t0xcc, 0x52, 0xb3, 0xf3, 0x5b, 0x51, 0x1c, 0xdf, 0x0b, 0xa0, 0x82, 0x0d,\n\t0xf7, 0xb1, 0x00, 0x60, 0x50, 0x88, 0x5f, 0xc7, 0x74, 0xf7, 0x6f, 0xd0,\n\t0xf0, 0x1f, 0x85, 0x45, 0xab, 0xa0, 0x4d, 0x17, 0x82, 0xad, 0x14, 
0x80,\n\t0xa4, 0x6f, 0x7a, 0xf1, 0x04, 0x16, 0xf4, 0x7d, 0x58, 0xa1, 0x0f, 0x20,\n\t0x42, 0x79, 0xd8, 0x52, 0xdd, 0xeb, 0x09, 0xa5, 0x10, 0x6f, 0x54, 0x4a,\n\t0x85, 0x87, 0xf7, 0x4d, 0x35, 0xbe, 0x5a, 0x2a, 0xb8, 0x2f, 0x83, 0x83,\n\t0x7d, 0x1b, 0x86, 0x7b, 0x6c, 0x93, 0x18, 0x10, 0x82, 0xda, 0x52, 0xb3,\n\t0xf5, 0xe1, 0xf9, 0xe5, 0xe6, 0x17, 0x7b, 0x41, 0xf8, 0x05, 0x22, 0xcc,\n\t0xe2, 0xba, 0x7c, 0x78, 0x09, 0x41, 0x01, 0x2a, 0xe1, 0x53, 0x98, 0xe9,\n\t0xfe, 0x05, 0x2a, 0xd1, 0x33, 0x90, 0xe4, 0x6f, 0x62, 0x7c, 0x60, 0xab,\n\t0x04, 0xc0, 0x20, 0xd2, 0x45, 0x2c, 0xc7, 0x77, 0x61, 0x81, 0xee, 0x47,\n\t0x80, 0x3d, 0x97, 0x7e, 0xdf, 0xc6, 0x09, 0x2c, 0xa5, 0x9e, 0x6d, 0x54,\n\t0x4b, 0x7f, 0xb8, 0x6f, 0xaa, 0xf1, 0xa8, 0x6d, 0xa9, 0x39, 0xb0, 0xf1,\n\t0x5f, 0x17, 0xdc, 0x6b, 0x9b, 0xc8, 0x80, 0x08, 0x38, 0x5d, 0x3f, 0x78,\n\t0xcf, 0xf9, 0xa5, 0xd5, 0x7f, 0xb6, 0xd6, 0xee, 0xfd, 0xb2, 0x36, 0xe6,\n\t0x16, 0x00, 0x85, 0xeb, 0xfa, 0x6c, 0x48, 0x58, 0xb4, 0x8a, 0x86, 0xff,\n\t0x43, 0x4c, 0x75, 0xff, 0x1a, 0xc5, 0xf8, 0x18, 0x92, 0x40, 0xf7, 0xf5,\n\t0x0a, 0xc1, 0x66, 0x0b, 0x40, 0x12, 0xdd, 0x6f, 0xea, 0x9b, 0x30, 0xaf,\n\t0x3f, 0x8a, 0x36, 0x0e, 0x03, 0x1b, 0x5f, 0xda, 0xbb, 0xf4, 0xa1, 0xc0,\n\t0xb2, 0xe7, 0xd8, 0x4f, 0x4f, 0x37, 0xaa, 0x5f, 0x4d, 0xe7, 0xfb, 0x2b,\n\t0xfd, 0x0f, 0x65, 0xe3, 0xdf, 0x38, 0xdc, 0x73, 0x9b, 0xcc, 0x60, 0x5c,\n\t0x40, 0x1b, 0xb3, 0xf7, 0xc2, 0x52, 0xf3, 0xe3, 0xcb, 0xcd, 0xd6, 0x97,\n\t0x82, 0x28, 0xbe, 0x1f, 0xc0, 0x34, 0xae, 0xcb, 0x62, 0x93, 0xdb, 0xe5,\n\t0xea, 0x33, 0x68, 0xf8, 0x0f, 0x63, 0xc2, 0xff, 0x3e, 0xbc, 0xf8, 0x38,\n\t0xae, 0x4f, 0x08, 0x36, 0x4b, 0x00, 0x0c, 0x0c, 0x59, 0x68, 0xc7, 0x07,\n\t0xb1, 0xa0, 0xef, 0xc3, 0x1a, 0x6e, 0x87, 0x46, 0x61, 0x33, 0x4e, 0x54,\n\t0x0e, 0xa4, 0x10, 0x6f, 0x95, 0x8a, 0xde, 0xf7, 0xf7, 0x4e, 0xd6, 0xbf,\n\t0x5e, 0x2d, 0x15, 0x5e, 0x04, 0xd0, 0x02, 0xcf, 0xf7, 0x37, 0x05, 0xee,\n\t0xc1, 0x2d, 0x60, 0x30, 0x7d, 0x18, 0x40, 0x79, 0xb5, 0xd5, 0xb9, 0xe3,\n\t0xc2, 0x72, 0xf3, 0xd7, 0xbb, 0xbd, 0xe0, 0x9f, 0x18, 0xa2, 0x03, 0x00,\n\t0xec, 0xeb, 0xfb, 0x0d, 0x12, 0x80, 0x81, 0xab, 0x4f, 0x61, 0xc2, 0x7f,\n\t0x04, 0x0d, 0xff, 0x61, 0x78, 0xf1, 0x49, 0x6c, 0x4c, 0x08, 0xae, 0x57,\n\t0x00, 0x0c, 0x0c, 0x29, 0xb4, 0xe3, 0x03, 0x58, 0xd4, 0xf7, 0x62, 0x0d,\n\t0xb7, 0x23, 0x46, 0x79, 0x33, 0x0c, 0x9f, 0x00, 0x74, 0x6c, 0x4b, 0x3d,\n\t0x5b, 0xaf, 0x94, 0xfe, 0x78, 0xef, 0x64, 0xfd, 0x87, 0x8e, 0x6d, 0x9d,\n\t0x05, 0xbb, 0xfc, 0x9b, 0x0a, 0xf7, 0xe2, 0x16, 0x32, 0xe8, 0x0d, 0x04,\n\t0x51, 0x74, 0xe4, 0xc2, 0x52, 0xf3, 0x0b, 0xab, 0xad, 0xce, 0x17, 0xa3,\n\t0x58, 0xdf, 0x83, 0xeb, 0x0a, 0x10, 0xf6, 0x49, 0x84, 0xc0, 0xd3, 0x27,\n\t0xd1, 0xe8, 0x3d, 0x8c, 0x09, 0xff, 0x11, 0xb8, 0xfa, 0x14, 0x86, 0x13,\n\t0x82, 0x8d, 0x0a, 0x40, 0x62, 0xf8, 0x9d, 0x78, 0x16, 0x8b, 0xfa, 0x5e,\n\t0x34, 0x71, 0x67, 0x6a, 0xf8, 0x84, 0x4d, 0xa8, 0xb5, 0x11, 0x4b, 0x21,\n\t0xce, 0x17, 0x3d, 0xf7, 0xb1, 0xa9, 0x46, 0xf5, 0xeb, 0x93, 0xb5, 0xf2,\n\t0x4f, 0xc0, 0x2e, 0xff, 0x96, 0xc0, 0x3d, 0xb9, 0xc5, 0x0c, 0xae, 0x12,\n\t0x00, 0xa8, 0x2f, 0xae, 0xb6, 0x3e, 0xb4, 0xb0, 0xb2, 0xf6, 0xc5, 0x5e,\n\t0x10, 0x7e, 0x86, 0x92, 0x9c, 0x81, 0xeb, 0xf4, 0x06, 0xfa, 0x1f, 0xad,\n\t0xe1, 0xc5, 0x27, 0xd0, 0xf0, 0x1f, 0x41, 0x23, 0x78, 0x0c, 0x6e, 0x7c,\n\t0x02, 0x12, 0x51, 0x86, 0x55, 0x83, 0x61, 0x04, 0x20, 0x31, 0x6e, 0x6d,\n\t0x1c, 0x74, 0xf4, 0x2c, 0x96, 0xcd, 0x3d, 0x68, 0xd2, 0x9d, 0x69, 0x64,\n\t0xbf, 0xff, 0xef, 0xd7, 0x85, 0x01, 0xb0, 0x6c, 0x5b, 0xea, 0xb9, 0x46,\n\t0xb5, 0xfc, 0x83, 0x99, 0x46, 0xf5, 0x61, 0xd7, 0xb1, 0xdf, 0x40, 0x52,\n\t0x9b, 0x91, 0x5d, 
0xfe, 0x2d, 0x80, 0x7b, 0x74, 0x9b, 0x18, 0x10, 0x02,\n\t0xb7, 0xe7, 0x87, 0x37, 0x9e, 0x5f, 0x5e, 0xfd, 0xc2, 0x5a, 0xa7, 0xf7,\n\t0xab, 0x71, 0xac, 0xdf, 0x87, 0x4d, 0xf1, 0x06, 0x80, 0xbe, 0x47, 0xe0,\n\t0xe8, 0x39, 0xd4, 0x82, 0x9f, 0xa0, 0xe1, 0xff, 0x00, 0xc5, 0xf8, 0x15,\n\t0x28, 0xea, 0x5d, 0x45, 0x08, 0xb2, 0x08, 0x40, 0x62, 0xf8, 0xb1, 0x29,\n\t0xa0, 0xa9, 0x6f, 0xc2, 0x8a, 0xbe, 0x1b, 0x6d, 0x1c, 0x41, 0x8c, 0xd2,\n\t0x66, 0x8d, 0xf8, 0x00, 0x10, 0x48, 0x29, 0xde, 0x2e, 0x7a, 0xee, 0xc3,\n\t0xd3, 0xf5, 0xea, 0x37, 0x26, 0x6a, 0xe5, 0x57, 0x00, 0x34, 0xb1, 0x0b,\n\t0x0f, 0xeb, 0xd8, 0x4e, 0xb8, 0x57, 0xb7, 0x91, 0xf5, 0xde, 0xc0, 0x52,\n\t0xb3, 0xf5, 0xa1, 0x85, 0x95, 0xd6, 0x17, 0x7b, 0x7e, 0xf0, 0x19, 0xb3,\n\t0x69, 0xde, 0x40, 0xff, 0xe3, 0x01, 0xcb, 0x2c, 0xa3, 0x12, 0x3e, 0x8d,\n\t0x86, 0xff, 0x08, 0x2a, 0xd1, 0xcf, 0xa1, 0xcc, 0xea, 0x65, 0xff, 0x9e,\n\t0xb6, 0xea, 0x2a, 0x02, 0x60, 0x40, 0x24, 0x10, 0x9a, 0x0a, 0x9a, 0xfa,\n\t0x36, 0x2c, 0x9b, 0x7b, 0xd0, 0xc5, 0x7e, 0x18, 0xb8, 0x9b, 0x69, 0xf8,\n\t0x31, 0x92, 0x8c, 0xbe, 0x5f, 0x34, 0x2a, 0xa5, 0xbf, 0x9a, 0x6e, 0x54,\n\t0x9f, 0x70, 0x6c, 0xeb, 0x4c, 0xfa, 0x73, 0x36, 0xfc, 0x2d, 0x86, 0x7b,\n\t0x77, 0x04, 0x0c, 0x7a, 0x03, 0x61, 0x14, 0xdf, 0x38, 0xbf, 0xd2, 0xfc,\n\t0xc2, 0xea, 0x5a, 0xe7, 0x73, 0x61, 0x14, 0xdf, 0x45, 0xc0, 0x14, 0x36,\n\t0x4d, 0x08, 0x04, 0x08, 0x12, 0x8a, 0xda, 0x28, 0x45, 0x2f, 0xa1, 0x16,\n\t0x3c, 0x81, 0x4a, 0xf8, 0x0c, 0x3c, 0x7d, 0x2a, 0xcd, 0x25, 0x48, 0x6f,\n\t0xff, 0x65, 0x02, 0xd0, 0x77, 0xf3, 0x5d, 0x74, 0xcd, 0x5e, 0x34, 0xf5,\n\t0x7b, 0xd0, 0xa4, 0x3b, 0xe0, 0x63, 0x0f, 0x08, 0x6a, 0x33, 0x82, 0x7b,\n\t0x17, 0xbb, 0x01, 0x40, 0x47, 0x29, 0xf9, 0x42, 0xb9, 0xe0, 0x3d, 0x3c,\n\t0x33, 0x51, 0x7b, 0xac, 0x5a, 0x2a, 0xbc, 0x84, 0x34, 0xc2, 0x0f, 0xb0,\n\t0xf1, 0x6f, 0x07, 0xdc, 0xc3, 0x23, 0x24, 0x15, 0x02, 0x09, 0xa0, 0xb6,\n\t0xd6, 0xe9, 0xdd, 0xb2, 0xb0, 0xb2, 0xf6, 0x60, 0xbb, 0xeb, 0x7f, 0x21,\n\t0xd6, 0x9b, 0x39, 0x2d, 0x00, 0xfa, 0x42, 0x20, 0x10, 0xc3, 0xd1, 0x17,\n\t0x50, 0x8a, 0x9e, 0x45, 0x2d, 0xf8, 0x3b, 0x94, 0xa3, 0x67, 0xe1, 0xe8,\n\t0x79, 0x88, 0xe2, 0x14, 0x30, 0x75, 0x37, 0x08, 0x2e, 0x7c, 0xdd, 0xc0,\n\t0x9a, 0x39, 0x82, 0x55, 0x73, 0x27, 0x7a, 0x98, 0xdd, 0xcc, 0xc0, 0xde,\n\t0xc5, 0xcb, 0x06, 0xd0, 0x95, 0x52, 0x9c, 0x2e, 0x7a, 0xee, 0x93, 0x93,\n\t0xb5, 0xca, 0xb7, 0x26, 0xaa, 0xe5, 0x27, 0xa5, 0x14, 0x2b, 0x60, 0x77,\n\t0x7f, 0xdb, 0xe1, 0x9e, 0x1e, 0x31, 0x97, 0xa5, 0x12, 0x13, 0xea, 0xcb,\n\t0x6b, 0xed, 0x0f, 0x2e, 0x37, 0x5b, 0xff, 0xb8, 0xd3, 0x0b, 0x3e, 0xaa,\n\t0x8d, 0xb9, 0x11, 0x40, 0x11, 0x9b, 0xba, 0x1b, 0x48, 0x82, 0x20, 0x20,\n\t0xa9, 0x07, 0x4f, 0x9f, 0x46, 0x25, 0xfc, 0x29, 0x6a, 0xe2, 0x55, 0x98,\n\t0xd2, 0x8d, 0x58, 0xa6, 0xf7, 0xa3, 0x4d, 0x87, 0x11, 0xa1, 0x91, 0x0a,\n\t0xc6, 0xa6, 0x1a, 0x3e, 0x00, 0xc4, 0x42, 0x88, 0x39, 0xd7, 0xb6, 0x9e,\n\t0xa8, 0x57, 0x4b, 0x8f, 0xcc, 0x34, 0x6a, 0x4f, 0xd9, 0x96, 0x3a, 0x83,\n\t0xf4, 0x14, 0x26, 0x36, 0xfc, 0xed, 0x87, 0x7b, 0x3c, 0x27, 0x0c, 0x4e,\n\t0x0b, 0xb4, 0x36, 0xb3, 0x0b, 0xab, 0x6b, 0xf7, 0x2e, 0xaf, 0xb5, 0x1f,\n\t0xf2, 0x83, 0xe8, 0xa3, 0x44, 0x74, 0x18, 0x89, 0x10, 0x6c, 0x22, 0x02,\n\t0x04, 0x01, 0x01, 0x40, 0x51, 0x0b, 0x10, 0x0a, 0x31, 0x8a, 0x5b, 0x61,\n\t0xf4, 0x40, 0x32, 0x9f, 0x5f, 0x76, 0x6d, 0xeb, 0xe5, 0x6a, 0xb9, 0xf8,\n\t0xc8, 0x54, 0xbd, 0xf2, 0x9d, 0xa2, 0xe7, 0x1e, 0x47, 0x62, 0xf8, 0x1c,\n\t0xdd, 0x1f, 0x21, 0xdc, 0xf3, 0x39, 0x63, 0xa0, 0xd6, 0x80, 0xeb, 0x07,\n\t0xd1, 0xbe, 0x85, 0xd5, 0xb5, 0xfb, 0xd7, 0x3a, 0xdd, 0x5f, 0x0f, 0xc2,\n\t0xf8, 0xc3, 0x44, 0x34, 0x89, 0x4d, 0x8b, 0x0f, 
0x0c, 0xd2, 0x7f, 0x0c,\n\t0xb6, 0xc6, 0xf0, 0x6d, 0x4b, 0xbd, 0x5c, 0x2e, 0x7a, 0x8f, 0x4f, 0xd5,\n\t0xab, 0x8f, 0x57, 0x4b, 0x85, 0x57, 0x91, 0xac, 0xe9, 0x1b, 0x36, 0xfc,\n\t0xd1, 0xc3, 0x77, 0x20, 0xa7, 0x0c, 0x78, 0x04, 0xa5, 0xae, 0x1f, 0xdc,\n\t0xb1, 0xbc, 0xd6, 0xfe, 0xf4, 0x5a, 0xbb, 0xf7, 0x29, 0x3f, 0x8c, 0x8e,\n\t0x12, 0xd1, 0x04, 0x12, 0x21, 0xc8, 0xeb, 0xfd, 0xbb, 0x68, 0xf8, 0xa5,\n\t0x82, 0xf7, 0xf8, 0x64, 0xad, 0xfc, 0x78, 0xbd, 0x52, 0x7a, 0x15, 0xc0,\n\t0x2a, 0x78, 0x9e, 0x9f, 0x2b, 0xf8, 0x2e, 0xe4, 0x98, 0x01, 0x11, 0x50,\n\t0x00, 0xaa, 0xbd, 0x20, 0xbc, 0x6d, 0x69, 0xb5, 0xf5, 0xb1, 0x66, 0xbb,\n\t0xfb, 0x81, 0x20, 0x8a, 0xdf, 0x4b, 0x44, 0x37, 0x00, 0x28, 0x21, 0x3f,\n\t0xf7, 0xf1, 0x32, 0xc3, 0x9f, 0xa8, 0x95, 0x1f, 0x6f, 0xb0, 0xe1, 0xe7,\n\t0x1a, 0xbe, 0x1b, 0x63, 0xc0, 0x3a, 0x21, 0x28, 0xfb, 0x61, 0x34, 0xb3,\n\t0xb8, 0xda, 0xba, 0xa7, 0xd5, 0xe9, 0x3e, 0xe4, 0x87, 0xf1, 0x47, 0x4c,\n\t0x12, 0x2c, 0x1c, 0xa5, 0x10, 0xc4, 0x02, 0x58, 0xb6, 0x2c, 0xf5, 0x72,\n\t0xb9, 0xe0, 0x3d, 0xd6, 0xa8, 0x96, 0x9f, 0x68, 0x54, 0xd9, 0xf0, 0xc7,\n\t0x01, 0xbe, 0x2b, 0x63, 0xc6, 0x60, 0x8c, 0x20, 0x8c, 0xe3, 0x7d, 0x2b,\n\t0xcd, 0xce, 0xbd, 0xcd, 0x4e, 0xf7, 0xa1, 0x9e, 0x1f, 0xde, 0x1f, 0x6b,\n\t0xbd, 0x1f, 0x40, 0x15, 0x5b, 0x12, 0x27, 0x78, 0x67, 0x53, 0x00, 0xf4,\n\t0x84, 0x10, 0xcb, 0x8e, 0x6d, 0xbd, 0x5e, 0x2e, 0x78, 0x8f, 0x37, 0xaa,\n\t0xa5, 0x27, 0x6a, 0xe5, 0x22, 0x1b, 0xfe, 0x18, 0xc1, 0x77, 0x67, 0x4c,\n\t0x19, 0x14, 0x02, 0x63, 0xcc, 0xbe, 0x95, 0x56, 0xf7, 0xce, 0x95, 0xb5,\n\t0xf6, 0xdd, 0x5d, 0x3f, 0x78, 0x20, 0xd6, 0xfa, 0x2e, 0x22, 0x4c, 0x21,\n\t0x39, 0xfa, 0x6d, 0xb3, 0xef, 0xb1, 0x01, 0xd0, 0x93, 0x52, 0x9c, 0x72,\n\t0x6c, 0xfb, 0xa7, 0xd5, 0x62, 0xe1, 0xa7, 0x13, 0xb5, 0xf2, 0x8b, 0xa5,\n\t0x82, 0xfb, 0x3a, 0xd8, 0xf0, 0xc7, 0x0e, 0xbe, 0x4b, 0x63, 0xcc, 0xc0,\n\t0xd4, 0x40, 0x20, 0x99, 0x1e, 0x54, 0x9a, 0xed, 0xee, 0x2d, 0xcd, 0x76,\n\t0xf7, 0xe3, 0xed, 0xae, 0xff, 0xc1, 0x30, 0x8a, 0x6f, 0xd7, 0xc6, 0x1c,\n\t0x42, 0x32, 0x3d, 0xb8, 0x9e, 0x5c, 0x02, 0x42, 0xea, 0xe6, 0x2b, 0xa5,\n\t0xde, 0x28, 0x78, 0xce, 0x4b, 0xd5, 0x62, 0xe1, 0xc7, 0x8d, 0x6a, 0xe9,\n\t0x69, 0xd7, 0xb1, 0xe7, 0x90, 0x2c, 0xe7, 0x71, 0x19, 0xee, 0x31, 0x84,\n\t0xef, 0xd6, 0x0e, 0x61, 0x5d, 0x9c, 0xa0, 0x12, 0x46, 0xf1, 0x9e, 0x95,\n\t0x56, 0xe7, 0x68, 0xab, 0xd3, 0x7b, 0xb0, 0xeb, 0x07, 0x47, 0x63, 0x6d,\n\t0x0e, 0xa7, 0xcb, 0x88, 0x05, 0x64, 0x17, 0x83, 0xfe, 0x68, 0x7f, 0xda,\n\t0xb1, 0xac, 0xd7, 0xca, 0x45, 0xef, 0xa9, 0x6a, 0xb9, 0xf8, 0x77, 0xb5,\n\t0x72, 0xf1, 0xb8, 0x14, 0x62, 0x09, 0xe9, 0x3a, 0x3e, 0xc0, 0x86, 0x3f,\n\t0xae, 0xf0, 0x5d, 0xdb, 0x81, 0x0c, 0x4c, 0x0f, 0x1c, 0x22, 0x9a, 0x6a,\n\t0x75, 0x7a, 0x87, 0x56, 0x5a, 0x9d, 0x3b, 0x3b, 0xbd, 0xe0, 0xde, 0x30,\n\t0x8a, 0xef, 0xd7, 0xc6, 0x1c, 0x44, 0x92, 0x6a, 0x7c, 0xa5, 0x29, 0x82,\n\t0x01, 0xe0, 0x0b, 0x21, 0x56, 0x2c, 0x25, 0x4f, 0x14, 0x3d, 0xf7, 0xa5,\n\t0x72, 0xd1, 0xfb, 0x51, 0xbd, 0x52, 0x7a, 0xde, 0x73, 0xec, 0x73, 0x00,\n\t0xd6, 0xc0, 0x6e, 0xfe, 0x8e, 0x81, 0xef, 0xe0, 0x0e, 0x66, 0xdd, 0xee,\n\t0x43, 0x37, 0x8c, 0xe3, 0x3d, 0xcd, 0x56, 0xf7, 0xe8, 0x5a, 0xa7, 0x77,\n\t0x77, 0xcf, 0x0f, 0xee, 0x89, 0xb4, 0xbe, 0xc5, 0x18, 0xda, 0x0f, 0xa0,\n\t0x0c, 0x40, 0x08, 0x60, 0x49, 0x2a, 0xf9, 0x96, 0x63, 0x5b, 0x2f, 0x97,\n\t0x3c, 0xef, 0xb9, 0x7a, 0xa5, 0xf8, 0x62, 0xb5, 0x54, 0x38, 0x29, 0x84,\n\t0x58, 0x04, 0x67, 0xed, 0xed, 0x48, 0xf8, 0x6e, 0xee, 0x02, 0xd6, 0xc5,\n\t0x0a, 0x2c, 0x22, 0x94, 0x7b, 0x41, 0xb8, 0xa7, 0xd9, 0xee, 0xdc, 0xd2,\n\t0xee, 0xfa, 0x77, 0xf9, 0x61, 0x74, 0x97, 0x52, 0x52, 0x15, 0x5d, 
0xf7,\n\t0xa9, 0x72, 0xd1, 0x7b, 0xb2, 0x56, 0x2e, 0x9c, 0xb0, 0x2d, 0x6b, 0x09,\n\t0x49, 0x21, 0x0e, 0x1e, 0xed, 0x77, 0x30, 0x7c, 0x57, 0x77, 0x21, 0x03,\n\t0x53, 0x04, 0x8b, 0x88, 0xca, 0x7e, 0x18, 0x4d, 0x2b, 0x29, 0x85, 0x63,\n\t0x5b, 0xf3, 0x60, 0x17, 0x7f, 0x57, 0xc1, 0x77, 0x78, 0x17, 0xb3, 0xae,\n\t0x78, 0xe9, 0x45, 0xd8, 0xf0, 0x19, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18, 0x86, 0x61, 0x18,\n\t0x86, 0x61, 0x36, 0x8d, 0xff, 0x0f, 0x1e, 0x62, 0x21, 0xaf, 0x5c, 0x4d,\n\t0xc4, 0x15, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42,\n\t0x60, 0x82,\n}\n","avg_line_length":72.7949438202,"max_line_length":72,"alphanum_fraction":0.6575232215} +{"size":2037,"ext":"go","lang":"Go","max_stars_count":246.0,"content":"package cabf_br\n\n\/*\n * ZLint Copyright 2021 Regents of the University of Michigan\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy\n * of the License at http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n * implied. See the License for the specific language governing\n * permissions and limitations under the License.\n *\/\n\n\/\/ CHANGE THIS COMMENT TO MATCH SOURCE TEXT\n\nimport (\n\t\"crypto\/rsa\"\n\n\t\"github.com\/zmap\/zcrypto\/x509\"\n\t\"github.com\/zmap\/zlint\/v3\/lint\"\n\t\"github.com\/zmap\/zlint\/v3\/util\"\n)\n\ntype subCaModSize struct{}\n\nfunc init() {\n\tlint.RegisterLint(&lint.Lint{\n\t\tName: \"e_old_sub_ca_rsa_mod_less_than_1024_bits\",\n\t\tDescription: \"In a validity period beginning on or before 31 Dec 2010 and ending on or before 31 Dec 2013, subordinate CA certificates using RSA public key algorithm MUST use a 1024 bit modulus\",\n\t\tCitation: \"BRs: 6.1.5\",\n\t\tSource: lint.CABFBaselineRequirements,\n\t\t\/\/ since effective date should be checked against end date in this specific case, putting time check into checkApplies instead, ZeroDate here to automatically pass NE test\n\t\tEffectiveDate: util.ZeroDate,\n\t\tLint: NewSubCaModSize,\n\t})\n}\n\nfunc NewSubCaModSize() lint.LintInterface {\n\treturn &subCaModSize{}\n}\n\nfunc (l *subCaModSize) CheckApplies(c *x509.Certificate) bool {\n\tissueDate := c.NotBefore\n\tendDate := c.NotAfter\n\t_, ok := c.PublicKey.(*rsa.PublicKey)\n\treturn ok && util.IsSubCA(c) && issueDate.Before(util.NoRSA1024RootDate) && endDate.Before(util.NoRSA1024Date)\n}\n\nfunc (l *subCaModSize) Execute(c *x509.Certificate) *lint.LintResult {\n\tkey := c.PublicKey.(*rsa.PublicKey)\n\tif key.N.BitLen() < 1024 {\n\t\treturn &lint.LintResult{Status: lint.Error}\n\t} else {\n\t\treturn &lint.LintResult{Status: lint.Pass}\n\t}\n}\n","avg_line_length":33.95,"max_line_length":197,"alphanum_fraction":0.7393225331} +{"size":18049,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\nCopyright 2016 The Kubernetes Authors.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the 
License.\nYou may obtain a copy of the License at\n\n http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n\npackage remote\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"google.golang.org\/grpc\"\n\t\"k8s.io\/klog\"\n\n\tinternalapi \"k8s.io\/kubernetes\/pkg\/kubelet\/apis\/cri\"\n\truntimeapi \"k8s.io\/kubernetes\/pkg\/kubelet\/apis\/cri\/runtime\/v1alpha2\"\n\t\"k8s.io\/kubernetes\/pkg\/kubelet\/util\"\n\tutilexec \"k8s.io\/utils\/exec\"\n)\n\n\/\/ RemoteRuntimeService is a gRPC implementation of internalapi.RuntimeService.\ntype RemoteRuntimeService struct {\n\ttimeout time.Duration\n\truntimeClient runtimeapi.RuntimeServiceClient\n\t\/\/ Cache last per-container error message to reduce log spam\n\tlastError map[string]string\n\t\/\/ Time last per-container error message was printed\n\terrorPrinted map[string]time.Time\n\terrorMapLock sync.Mutex\n}\n\nconst (\n\t\/\/ How frequently to report identical errors\n\tidenticalErrorDelay = 1 * time.Minute\n)\n\n\/\/ NewRemoteRuntimeService creates a new internalapi.RuntimeService.\nfunc NewRemoteRuntimeService(endpoint string, connectionTimeout time.Duration) (internalapi.RuntimeService, error) {\n\tklog.V(3).Infof(\"Connecting to runtime service %s\", endpoint)\n\taddr, dailer, err := util.GetAddressAndDialer(endpoint)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tctx, cancel := context.WithTimeout(context.Background(), connectionTimeout)\n\tdefer cancel()\n\n\tconn, err := grpc.DialContext(ctx, addr, grpc.WithInsecure(), grpc.WithDialer(dailer), grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(maxMsgSize)))\n\tif err != nil {\n\t\tklog.Errorf(\"Connect remote runtime %s failed: %v\", addr, err)\n\t\treturn nil, err\n\t}\n\n\treturn &RemoteRuntimeService{\n\t\ttimeout: connectionTimeout,\n\t\truntimeClient: runtimeapi.NewRuntimeServiceClient(conn),\n\t\tlastError: make(map[string]string),\n\t\terrorPrinted: make(map[string]time.Time),\n\t}, nil\n}\n\n\/\/ Version returns the runtime name, runtime version and runtime API version.\nfunc (r *RemoteRuntimeService) Version(apiVersion string) (*runtimeapi.VersionResponse, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\ttypedVersion, err := r.runtimeClient.Version(ctx, &runtimeapi.VersionRequest{\n\t\tVersion: apiVersion,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"Version from runtime service failed: %v\", err)\n\t\treturn nil, err\n\t}\n\n\tif typedVersion.Version == \"\" || typedVersion.RuntimeName == \"\" || typedVersion.RuntimeApiVersion == \"\" || typedVersion.RuntimeVersion == \"\" {\n\t\treturn nil, fmt.Errorf(\"not all fields are set in VersionResponse (%q)\", *typedVersion)\n\t}\n\n\treturn typedVersion, err\n}\n\n\/\/ RunPodSandbox creates and starts a pod-level sandbox. 
Runtimes should ensure\n\/\/ the sandbox is in ready state.\nfunc (r *RemoteRuntimeService) RunPodSandbox(config *runtimeapi.PodSandboxConfig, runtimeHandler string) (string, error) {\n\t\/\/ Use 2 times longer timeout for sandbox operation (4 mins by default)\n\t\/\/ TODO: Make the pod sandbox timeout configurable.\n\tctx, cancel := getContextWithTimeout(r.timeout * 2)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.RunPodSandbox(ctx, &runtimeapi.RunPodSandboxRequest{\n\t\tConfig: config,\n\t\tRuntimeHandler: runtimeHandler,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"RunPodSandbox from runtime service failed: %v\", err)\n\t\treturn \"\", err\n\t}\n\n\tif resp.PodSandboxId == \"\" {\n\t\terrorMessage := fmt.Sprintf(\"PodSandboxId is not set for sandbox %q\", config.GetMetadata())\n\t\tklog.Errorf(\"RunPodSandbox failed: %s\", errorMessage)\n\t\treturn \"\", errors.New(errorMessage)\n\t}\n\n\treturn resp.PodSandboxId, nil\n}\n\n\/\/ StopPodSandbox stops the sandbox. If there are any running containers in the\n\/\/ sandbox, they should be forced to termination.\nfunc (r *RemoteRuntimeService) StopPodSandbox(podSandBoxID string) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t_, err := r.runtimeClient.StopPodSandbox(ctx, &runtimeapi.StopPodSandboxRequest{\n\t\tPodSandboxId: podSandBoxID,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"StopPodSandbox %q from runtime service failed: %v\", podSandBoxID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ RemovePodSandbox removes the sandbox. If there are any containers in the\n\/\/ sandbox, they should be forcibly removed.\nfunc (r *RemoteRuntimeService) RemovePodSandbox(podSandBoxID string) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t_, err := r.runtimeClient.RemovePodSandbox(ctx, &runtimeapi.RemovePodSandboxRequest{\n\t\tPodSandboxId: podSandBoxID,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"RemovePodSandbox %q from runtime service failed: %v\", podSandBoxID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ PodSandboxStatus returns the status of the PodSandbox.\nfunc (r *RemoteRuntimeService) PodSandboxStatus(podSandBoxID string) (*runtimeapi.PodSandboxStatus, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.PodSandboxStatus(ctx, &runtimeapi.PodSandboxStatusRequest{\n\t\tPodSandboxId: podSandBoxID,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif resp.Status != nil {\n\t\tif err := verifySandboxStatus(resp.Status); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn resp.Status, nil\n}\n\n\/\/ ListPodSandbox returns a list of PodSandboxes.\nfunc (r *RemoteRuntimeService) ListPodSandbox(filter *runtimeapi.PodSandboxFilter) ([]*runtimeapi.PodSandbox, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.ListPodSandbox(ctx, &runtimeapi.ListPodSandboxRequest{\n\t\tFilter: filter,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"ListPodSandbox with filter %+v from runtime service failed: %v\", filter, err)\n\t\treturn nil, err\n\t}\n\n\treturn resp.Items, nil\n}\n\n\/\/ CreateContainer creates a new container in the specified PodSandbox.\nfunc (r *RemoteRuntimeService) CreateContainer(podSandBoxID string, config *runtimeapi.ContainerConfig, sandboxConfig *runtimeapi.PodSandboxConfig) (string, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.CreateContainer(ctx, 
&runtimeapi.CreateContainerRequest{\n\t\tPodSandboxId: podSandBoxID,\n\t\tConfig: config,\n\t\tSandboxConfig: sandboxConfig,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"CreateContainer in sandbox %q from runtime service failed: %v\", podSandBoxID, err)\n\t\treturn \"\", err\n\t}\n\n\tif resp.ContainerId == \"\" {\n\t\terrorMessage := fmt.Sprintf(\"ContainerId is not set for container %q\", config.GetMetadata())\n\t\tklog.Errorf(\"CreateContainer failed: %s\", errorMessage)\n\t\treturn \"\", errors.New(errorMessage)\n\t}\n\n\treturn resp.ContainerId, nil\n}\n\n\/\/ StartContainer starts the container.\nfunc (r *RemoteRuntimeService) StartContainer(containerID string) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t_, err := r.runtimeClient.StartContainer(ctx, &runtimeapi.StartContainerRequest{\n\t\tContainerId: containerID,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"StartContainer %q from runtime service failed: %v\", containerID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ StopContainer stops a running container with a grace period (i.e., timeout).\nfunc (r *RemoteRuntimeService) StopContainer(containerID string, timeout int64) error {\n\t\/\/ Use timeout + default timeout (2 minutes) as timeout to leave extra time\n\t\/\/ for SIGKILL container and request latency.\n\tt := r.timeout + time.Duration(timeout)*time.Second\n\tctx, cancel := getContextWithTimeout(t)\n\tdefer cancel()\n\n\tr.errorMapLock.Lock()\n\tdelete(r.lastError, containerID)\n\tdelete(r.errorPrinted, containerID)\n\tr.errorMapLock.Unlock()\n\t_, err := r.runtimeClient.StopContainer(ctx, &runtimeapi.StopContainerRequest{\n\t\tContainerId: containerID,\n\t\tTimeout: timeout,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"StopContainer %q from runtime service failed: %v\", containerID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ RemoveContainer removes the container. 
If the container is running, the container\n\/\/ should be forced to removal.\nfunc (r *RemoteRuntimeService) RemoveContainer(containerID string) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tr.errorMapLock.Lock()\n\tdelete(r.lastError, containerID)\n\tdelete(r.errorPrinted, containerID)\n\tr.errorMapLock.Unlock()\n\t_, err := r.runtimeClient.RemoveContainer(ctx, &runtimeapi.RemoveContainerRequest{\n\t\tContainerId: containerID,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"RemoveContainer %q from runtime service failed: %v\", containerID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ ListContainers lists containers by filters.\nfunc (r *RemoteRuntimeService) ListContainers(filter *runtimeapi.ContainerFilter) ([]*runtimeapi.Container, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.ListContainers(ctx, &runtimeapi.ListContainersRequest{\n\t\tFilter: filter,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"ListContainers with filter %+v from runtime service failed: %v\", filter, err)\n\t\treturn nil, err\n\t}\n\n\treturn resp.Containers, nil\n}\n\n\/\/ Clean up any expired last-error timers\nfunc (r *RemoteRuntimeService) cleanupErrorTimeouts() {\n\tr.errorMapLock.Lock()\n\tdefer r.errorMapLock.Unlock()\n\tfor ID, timeout := range r.errorPrinted {\n\t\tif time.Now().Sub(timeout) >= identicalErrorDelay {\n\t\t\tdelete(r.lastError, ID)\n\t\t\tdelete(r.errorPrinted, ID)\n\t\t}\n\t}\n}\n\n\/\/ ContainerStatus returns the container status.\nfunc (r *RemoteRuntimeService) ContainerStatus(containerID string) (*runtimeapi.ContainerStatus, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{\n\t\tContainerId: containerID,\n\t})\n\tr.cleanupErrorTimeouts()\n\tr.errorMapLock.Lock()\n\tdefer r.errorMapLock.Unlock()\n\tif err != nil {\n\t\t\/\/ Don't spam the log with endless messages about the same failure.\n\t\tlastMsg, ok := r.lastError[containerID]\n\t\tif !ok || err.Error() != lastMsg || time.Now().Sub(r.errorPrinted[containerID]) >= identicalErrorDelay {\n\t\t\tklog.Errorf(\"ContainerStatus %q from runtime service failed: %v\", containerID, err)\n\t\t\tr.errorPrinted[containerID] = time.Now()\n\t\t\tr.lastError[containerID] = err.Error()\n\t\t}\n\t\treturn nil, err\n\t}\n\tdelete(r.lastError, containerID)\n\tdelete(r.errorPrinted, containerID)\n\n\tif resp.Status != nil {\n\t\tif err := verifyContainerStatus(resp.Status); err != nil {\n\t\t\tklog.Errorf(\"ContainerStatus of %q failed: %v\", containerID, err)\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn resp.Status, nil\n}\n\n\/\/ UpdateContainerResources updates a containers resource config\nfunc (r *RemoteRuntimeService) UpdateContainerResources(containerID string, resources *runtimeapi.LinuxContainerResources) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t_, err := r.runtimeClient.UpdateContainerResources(ctx, &runtimeapi.UpdateContainerResourcesRequest{\n\t\tContainerId: containerID,\n\t\tLinux: resources,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"UpdateContainerResources %q from runtime service failed: %v\", containerID, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ ExecSync executes a command in the container, and returns the stdout output.\n\/\/ If command exits with a non-zero exit code, an error is returned.\nfunc (r *RemoteRuntimeService) ExecSync(containerID string, cmd []string, 
timeout time.Duration) (stdout []byte, stderr []byte, err error) {\n\t\/\/ Do not set timeout when timeout is 0.\n\tfmt.Println(\"2222222222222222222222222222222222222222222222222\")\n\tvar ctx context.Context\n\tvar cancel context.CancelFunc\n\tif timeout != 0 {\n\t\t\/\/ Use timeout + default timeout (2 minutes) as timeout to leave some time for\n\t\t\/\/ the runtime to do cleanup.\n\t\tfmt.Println(\"55555555555555555555555555555555555555555555555\")\n\t\tctx, cancel = getContextWithTimeout(r.timeout + timeout)\n\t} else {\n\t\tfmt.Println(\"6666666666666666666666666666666666666666666666\")\n\t\tctx, cancel = getContextWithCancel()\n\t}\n\tdefer cancel()\n\n\ttimeoutSeconds := int64(timeout.Seconds())\n\treq := &runtimeapi.ExecSyncRequest{\n\t\tContainerId: containerID,\n\t\tCmd: cmd,\n\t\tTimeout: timeoutSeconds,\n\t}\n\tresp, err := r.runtimeClient.ExecSync(ctx, req)\n\tif err != nil {\n\t\tfmt.Println(\"333333333333333333333333333333333333333333333333\")\n\t\tklog.Errorf(\"ExecSync %s '%s' from runtime service failed: %v\", containerID, strings.Join(cmd, \" \"), err)\n\t\treturn nil, nil, err\n\t}\n\n\terr = nil\n\tif resp.ExitCode != 0 {\n\t\tfmt.Println(\"4444444444444444444444444444444444444444444444444\")\n\t\terr = utilexec.CodeExitError{\n\t\t\tErr: fmt.Errorf(\"command '%s' exited with %d: %s\", strings.Join(cmd, \" \"), resp.ExitCode, resp.Stderr),\n\t\t\tCode: int(resp.ExitCode),\n\t\t}\n\t}\n\n\treturn resp.Stdout, resp.Stderr, err\n}\n\n\/\/ Exec prepares a streaming endpoint to execute a command in the container, and returns the address.\nfunc (r *RemoteRuntimeService) Exec(req *runtimeapi.ExecRequest) (*runtimeapi.ExecResponse, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.Exec(ctx, req)\n\tif err != nil {\n\t\tklog.Errorf(\"Exec %s '%s' from runtime service failed: %v\", req.ContainerId, strings.Join(req.Cmd, \" \"), err)\n\t\treturn nil, err\n\t}\n\n\tif resp.Url == \"\" {\n\t\terrorMessage := \"URL is not set\"\n\t\tklog.Errorf(\"Exec failed: %s\", errorMessage)\n\t\treturn nil, errors.New(errorMessage)\n\t}\n\n\treturn resp, nil\n}\n\n\/\/ Attach prepares a streaming endpoint to attach to a running container, and returns the address.\nfunc (r *RemoteRuntimeService) Attach(req *runtimeapi.AttachRequest) (*runtimeapi.AttachResponse, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.Attach(ctx, req)\n\tif err != nil {\n\t\tklog.Errorf(\"Attach %s from runtime service failed: %v\", req.ContainerId, err)\n\t\treturn nil, err\n\t}\n\n\tif resp.Url == \"\" {\n\t\terrorMessage := \"URL is not set\"\n\t\tklog.Errorf(\"Exec failed: %s\", errorMessage)\n\t\treturn nil, errors.New(errorMessage)\n\t}\n\treturn resp, nil\n}\n\n\/\/ PortForward prepares a streaming endpoint to forward ports from a PodSandbox, and returns the address.\nfunc (r *RemoteRuntimeService) PortForward(req *runtimeapi.PortForwardRequest) (*runtimeapi.PortForwardResponse, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.PortForward(ctx, req)\n\tif err != nil {\n\t\tklog.Errorf(\"PortForward %s from runtime service failed: %v\", req.PodSandboxId, err)\n\t\treturn nil, err\n\t}\n\n\tif resp.Url == \"\" {\n\t\terrorMessage := \"URL is not set\"\n\t\tklog.Errorf(\"Exec failed: %s\", errorMessage)\n\t\treturn nil, errors.New(errorMessage)\n\t}\n\n\treturn resp, nil\n}\n\n\/\/ UpdateRuntimeConfig updates the config of a runtime 
service. The only\n\/\/ update payload currently supported is the pod CIDR assigned to a node,\n\/\/ and the runtime service just proxies it down to the network plugin.\nfunc (r *RemoteRuntimeService) UpdateRuntimeConfig(runtimeConfig *runtimeapi.RuntimeConfig) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t\/\/ Response doesn't contain anything of interest. This translates to an\n\t\/\/ Event notification to the network plugin, which can't fail, so we're\n\t\/\/ really looking to surface destination unreachable.\n\t_, err := r.runtimeClient.UpdateRuntimeConfig(ctx, &runtimeapi.UpdateRuntimeConfigRequest{\n\t\tRuntimeConfig: runtimeConfig,\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n\/\/ Status returns the status of the runtime.\nfunc (r *RemoteRuntimeService) Status() (*runtimeapi.RuntimeStatus, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.Status(ctx, &runtimeapi.StatusRequest{})\n\tif err != nil {\n\t\tklog.Errorf(\"Status from runtime service failed: %v\", err)\n\t\treturn nil, err\n\t}\n\n\tif resp.Status == nil || len(resp.Status.Conditions) < 2 {\n\t\terrorMessage := \"RuntimeReady or NetworkReady condition are not set\"\n\t\tklog.Errorf(\"Status failed: %s\", errorMessage)\n\t\treturn nil, errors.New(errorMessage)\n\t}\n\n\treturn resp.Status, nil\n}\n\n\/\/ ContainerStats returns the stats of the container.\nfunc (r *RemoteRuntimeService) ContainerStats(containerID string) (*runtimeapi.ContainerStats, error) {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.ContainerStats(ctx, &runtimeapi.ContainerStatsRequest{\n\t\tContainerId: containerID,\n\t})\n\tr.cleanupErrorTimeouts()\n\tr.errorMapLock.Lock()\n\tdefer r.errorMapLock.Unlock()\n\tif err != nil {\n\t\tlastMsg, ok := r.lastError[containerID]\n\t\tif !ok || err.Error() != lastMsg || time.Now().Sub(r.errorPrinted[containerID]) >= identicalErrorDelay {\n\t\t\tklog.Errorf(\"ContainerStatus %q from runtime service failed: %v\", containerID, err)\n\t\t\tr.errorPrinted[containerID] = time.Now()\n\t\t\tr.lastError[containerID] = err.Error()\n\t\t}\n\t\treturn nil, err\n\t}\n\tdelete(r.lastError, containerID)\n\tdelete(r.errorPrinted, containerID)\n\n\treturn resp.GetStats(), nil\n}\n\nfunc (r *RemoteRuntimeService) ListContainerStats(filter *runtimeapi.ContainerStatsFilter) ([]*runtimeapi.ContainerStats, error) {\n\t\/\/ Do not set timeout, because writable layer stats collection takes time.\n\t\/\/ TODO(random-liu): Should we assume runtime should cache the result, and set timeout here?\n\tctx, cancel := getContextWithCancel()\n\tdefer cancel()\n\n\tresp, err := r.runtimeClient.ListContainerStats(ctx, &runtimeapi.ListContainerStatsRequest{\n\t\tFilter: filter,\n\t})\n\tif err != nil {\n\t\tklog.Errorf(\"ListContainerStats with filter %+v from runtime service failed: %v\", filter, err)\n\t\treturn nil, err\n\t}\n\n\treturn resp.GetStats(), nil\n}\n\nfunc (r *RemoteRuntimeService) ReopenContainerLog(containerID string) error {\n\tctx, cancel := getContextWithTimeout(r.timeout)\n\tdefer cancel()\n\n\t_, err := r.runtimeClient.ReopenContainerLog(ctx, &runtimeapi.ReopenContainerLogRequest{ContainerId: containerID})\n\tif err != nil {\n\t\tklog.Errorf(\"ReopenContainerLog %q from runtime service failed: %v\", containerID, err)\n\t\treturn err\n\t}\n\treturn nil\n}\n","avg_line_length":32.5794223827,"max_line_length":165,"alphanum_fraction":0.7428666408} 
+{"size":7410,"ext":"go","lang":"Go","max_stars_count":21.0,"content":"\/\/ Copyright 2013 The Go Authors. All rights reserved.\n\/\/ Use of this source code is governed by a BSD-style\n\/\/ license that can be found in the LICENSE file.\n\n\/\/ This file implements LinkifyText which introduces\n\/\/ links for identifiers pointing to their declarations.\n\/\/ The approach does not cover all cases because godoc\n\/\/ doesn't have complete type information, but it's\n\/\/ reasonably good for browsing.\n\npackage godoc\n\nimport (\n\t\"fmt\"\n\t\"go\/ast\"\n\t\"go\/token\"\n\t\"io\"\n\t\"strconv\"\n)\n\n\/\/ LinkifyText HTML-escapes source text and writes it to w.\n\/\/ Identifiers that are in a \"use\" position (i.e., that are\n\/\/ not being declared), are wrapped with HTML links pointing\n\/\/ to the respective declaration, if possible. Comments are\n\/\/ formatted the same way as with FormatText.\n\/\/\nfunc LinkifyText(w io.Writer, text []byte, n ast.Node) {\n\tlinks := linksFor(n)\n\n\ti := 0 \/\/ links index\n\tprev := \"\" \/\/ prev HTML tag\n\tlinkWriter := func(w io.Writer, _ int, start bool) {\n\t\t\/\/ end tag\n\t\tif !start {\n\t\t\tif prev != \"\" {\n\t\t\t\tfmt.Fprintf(w, `<\/%s>`, prev)\n\t\t\t\tprev = \"\"\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t\/\/ start tag\n\t\tprev = \"\"\n\t\tif i < len(links) {\n\t\t\tswitch info := links[i]; {\n\t\t\tcase info.path != \"\" && info.name == \"\":\n\t\t\t\t\/\/ package path\n\t\t\t\tfmt.Fprintf(w, ``, info.path)\n\t\t\t\tprev = \"a\"\n\t\t\tcase info.path != \"\" && info.name != \"\":\n\t\t\t\t\/\/ qualified identifier\n\t\t\t\tfmt.Fprintf(w, ``, info.path, info.name)\n\t\t\t\tprev = \"a\"\n\t\t\tcase info.path == \"\" && info.name != \"\":\n\t\t\t\t\/\/ local identifier\n\t\t\t\tif info.mode == identVal {\n\t\t\t\t\tfmt.Fprintf(w, ``, info.name)\n\t\t\t\t\tprev = \"span\"\n\t\t\t\t} else if ast.IsExported(info.name) {\n\t\t\t\t\tfmt.Fprintf(w, ``, info.name)\n\t\t\t\t\tprev = \"a\"\n\t\t\t\t}\n\t\t\t}\n\t\t\ti++\n\t\t}\n\t}\n\n\tidents := tokenSelection(text, token.IDENT)\n\tcomments := tokenSelection(text, token.COMMENT)\n\tFormatSelections(w, text, linkWriter, idents, selectionTag, comments)\n}\n\n\/\/ A link describes the (HTML) link information for an identifier.\n\/\/ The zero value of a link represents \"no link\".\n\/\/\ntype link struct {\n\tmode identMode\n\tpath, name string \/\/ package path, identifier name\n}\n\n\/\/ The identMode describes how an identifier is \"used\" at its source location.\ntype identMode int\n\nconst (\n\tidentUse identMode = iota \/\/ identifier is used (must be zero value for identMode)\n\tidentDef \/\/ identifier is defined\n\tidentVal \/\/ identifier is defined in a const or var declaration\n)\n\n\/\/ linksFor returns the list of links for the identifiers used\n\/\/ by node in the same order as they appear in the source.\n\/\/\nfunc linksFor(node ast.Node) (links []link) {\n\t\/\/ linkMap tracks link information for each ast.Ident node. Entries may\n\t\/\/ be created out of source order (for example, when we visit a parent\n\t\/\/ definition node). 
These links are appended to the returned slice when\n\t\/\/ their ast.Ident nodes are visited.\n\tlinkMap := make(map[*ast.Ident]link)\n\n\tast.Inspect(node, func(node ast.Node) bool {\n\t\tswitch n := node.(type) {\n\t\tcase *ast.Field:\n\t\t\tfor _, n := range n.Names {\n\t\t\t\tlinkMap[n] = link{mode: identDef}\n\t\t\t}\n\t\tcase *ast.ImportSpec:\n\t\t\tif name := n.Name; name != nil {\n\t\t\t\tlinkMap[name] = link{mode: identDef}\n\t\t\t}\n\t\tcase *ast.ValueSpec:\n\t\t\tfor _, n := range n.Names {\n\t\t\t\tlinkMap[n] = link{mode: identVal}\n\t\t\t}\n\t\tcase *ast.TypeSpec:\n\t\t\tlinkMap[n.Name] = link{mode: identDef}\n\t\tcase *ast.AssignStmt:\n\t\t\t\/\/ Short variable declarations only show up if we apply\n\t\t\t\/\/ this code to all source code (as opposed to exported\n\t\t\t\/\/ declarations only).\n\t\t\tif n.Tok == token.DEFINE {\n\t\t\t\t\/\/ Some of the lhs variables may be re-declared,\n\t\t\t\t\/\/ so technically they are not defs. We don't\n\t\t\t\t\/\/ care for now.\n\t\t\t\tfor _, x := range n.Lhs {\n\t\t\t\t\t\/\/ Each lhs expression should be an\n\t\t\t\t\t\/\/ ident, but we are conservative and check.\n\t\t\t\t\tif n, _ := x.(*ast.Ident); n != nil {\n\t\t\t\t\t\tlinkMap[n] = link{mode: identVal}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\tcase *ast.SelectorExpr:\n\t\t\t\/\/ Detect qualified identifiers of the form pkg.ident.\n\t\t\t\/\/ If anything fails we return true and collect individual\n\t\t\t\/\/ identifiers instead.\n\t\t\tif x, _ := n.X.(*ast.Ident); x != nil {\n\t\t\t\t\/\/ Create links only if x is a qualified identifier.\n\t\t\t\tif obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {\n\t\t\t\t\tif spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {\n\t\t\t\t\t\t\/\/ spec.Path.Value is the import path\n\t\t\t\t\t\tif path, err := strconv.Unquote(spec.Path.Value); err == nil {\n\t\t\t\t\t\t\t\/\/ Register two links, one for the package\n\t\t\t\t\t\t\t\/\/ and one for the qualified identifier.\n\t\t\t\t\t\t\tlinkMap[x] = link{mode: identUse, path: path}\n\t\t\t\t\t\t\tlinkMap[n.Sel] = link{mode: identUse, path: path, name: n.Sel.Name}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\tcase *ast.CompositeLit:\n\t\t\t\/\/ Detect field names within composite literals. These links should\n\t\t\t\/\/ be prefixed by the type name.\n\t\t\tfieldPath := \"\"\n\t\t\tprefix := \"\"\n\t\t\tswitch typ := n.Type.(type) {\n\t\t\tcase *ast.Ident:\n\t\t\t\tprefix = typ.Name + \".\"\n\t\t\tcase *ast.SelectorExpr:\n\t\t\t\tif x, _ := typ.X.(*ast.Ident); x != nil {\n\t\t\t\t\t\/\/ Create links only if x is a qualified identifier.\n\t\t\t\t\tif obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {\n\t\t\t\t\t\tif spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {\n\t\t\t\t\t\t\t\/\/ spec.Path.Value is the import path\n\t\t\t\t\t\t\tif path, err := strconv.Unquote(spec.Path.Value); err == nil {\n\t\t\t\t\t\t\t\t\/\/ Register two links, one for the package\n\t\t\t\t\t\t\t\t\/\/ and one for the qualified identifier.\n\t\t\t\t\t\t\t\tlinkMap[x] = link{mode: identUse, path: path}\n\t\t\t\t\t\t\t\tlinkMap[typ.Sel] = link{mode: identUse, path: path, name: typ.Sel.Name}\n\t\t\t\t\t\t\t\tfieldPath = path\n\t\t\t\t\t\t\t\tprefix = typ.Sel.Name + \".\"\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor _, e := range n.Elts {\n\t\t\t\tif kv, ok := e.(*ast.KeyValueExpr); ok {\n\t\t\t\t\tif k, ok := kv.Key.(*ast.Ident); ok {\n\t\t\t\t\t\t\/\/ Note: there is some syntactic ambiguity here. 
We cannot determine\n\t\t\t\t\t\t\/\/ if this is a struct literal or a map literal without type\n\t\t\t\t\t\t\/\/ information. We assume struct literal.\n\t\t\t\t\t\tname := prefix + k.Name\n\t\t\t\t\t\tlinkMap[k] = link{mode: identUse, path: fieldPath, name: name}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\tcase *ast.Ident:\n\t\t\tif l, ok := linkMap[n]; ok {\n\t\t\t\tlinks = append(links, l)\n\t\t\t} else {\n\t\t\t\tl := link{mode: identUse, name: n.Name}\n\t\t\t\tif n.Obj == nil && predeclared[n.Name] {\n\t\t\t\t\tl.path = builtinPkgPath\n\t\t\t\t}\n\t\t\t\tlinks = append(links, l)\n\t\t\t}\n\t\t}\n\t\treturn true\n\t})\n\treturn\n}\n\n\/\/ The predeclared map represents the set of all predeclared identifiers.\n\/\/ TODO(gri) This information is also encoded in similar maps in go\/doc,\n\/\/ but not exported. Consider exporting an accessor and using\n\/\/ it instead.\nvar predeclared = map[string]bool{\n\t\"bool\": true,\n\t\"byte\": true,\n\t\"complex64\": true,\n\t\"complex128\": true,\n\t\"error\": true,\n\t\"float32\": true,\n\t\"float64\": true,\n\t\"int\": true,\n\t\"int8\": true,\n\t\"int16\": true,\n\t\"int32\": true,\n\t\"int64\": true,\n\t\"rune\": true,\n\t\"string\": true,\n\t\"uint\": true,\n\t\"uint8\": true,\n\t\"uint16\": true,\n\t\"uint32\": true,\n\t\"uint64\": true,\n\t\"uintptr\": true,\n\t\"true\": true,\n\t\"false\": true,\n\t\"iota\": true,\n\t\"nil\": true,\n\t\"append\": true,\n\t\"cap\": true,\n\t\"close\": true,\n\t\"complex\": true,\n\t\"copy\": true,\n\t\"delete\": true,\n\t\"imag\": true,\n\t\"len\": true,\n\t\"make\": true,\n\t\"new\": true,\n\t\"panic\": true,\n\t\"print\": true,\n\t\"println\": true,\n\t\"real\": true,\n\t\"recover\": true,\n}\n","avg_line_length":29.8790322581,"max_line_length":83,"alphanum_fraction":0.6026990553} +{"size":1685,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Code generated by \"go generate github.com\/neoyagami\/gonum\/unit\u201d; DO NOT EDIT.\n\n\/\/ Copyright \u00a92014 The Gonum Authors. All rights reserved.\n\/\/ Use of this source code is governed by a BSD-style\n\/\/ license that can be found in the LICENSE file.\n\npackage unit\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"math\"\n\t\"unicode\/utf8\"\n)\n\n\/\/ Frequency represents a frequency in Hertz.\ntype Frequency float64\n\nconst Hertz Frequency = 1\n\n\/\/ Unit converts the Frequency to a *Unit.\nfunc (f Frequency) Unit() *Unit {\n\treturn New(float64(f), Dimensions{\n\t\tTimeDim: -1,\n\t})\n}\n\n\/\/ Frequency allows Frequency to implement a Frequencyer interface.\nfunc (f Frequency) Frequency() Frequency {\n\treturn f\n}\n\n\/\/ From converts the unit into the receiver. 
From returns an\n\/\/ error if there is a mismatch in dimension.\nfunc (f *Frequency) From(u Uniter) error {\n\tif !DimensionsMatch(u, Hertz) {\n\t\t*f = Frequency(math.NaN())\n\t\treturn errors.New(\"unit: dimension mismatch\")\n\t}\n\t*f = Frequency(u.Unit().Value())\n\treturn nil\n}\n\nfunc (f Frequency) Format(fs fmt.State, c rune) {\n\tswitch c {\n\tcase 'v':\n\t\tif fs.Flag('#') {\n\t\t\tfmt.Fprintf(fs, \"%T(%v)\", f, float64(f))\n\t\t\treturn\n\t\t}\n\t\tfallthrough\n\tcase 'e', 'E', 'f', 'F', 'g', 'G':\n\t\tp, pOk := fs.Precision()\n\t\tw, wOk := fs.Width()\n\t\tconst unit = \" Hz\"\n\t\tswitch {\n\t\tcase pOk && wOk:\n\t\t\tfmt.Fprintf(fs, \"%*.*\"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(f))\n\t\tcase pOk:\n\t\t\tfmt.Fprintf(fs, \"%.*\"+string(c), p, float64(f))\n\t\tcase wOk:\n\t\t\tfmt.Fprintf(fs, \"%*\"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(f))\n\t\tdefault:\n\t\t\tfmt.Fprintf(fs, \"%\"+string(c), float64(f))\n\t\t}\n\t\tfmt.Fprint(fs, unit)\n\tdefault:\n\t\tfmt.Fprintf(fs, \"%%!%c(%T=%g Hz)\", c, f, float64(f))\n\t}\n}\n","avg_line_length":23.7323943662,"max_line_length":88,"alphanum_fraction":0.6451038576} +{"size":93,"ext":"go","lang":"Go","max_stars_count":2.0,"content":"package common_services\n\ntype SmsSender interface {\n\tSend(to string, message string) error\n}\n","avg_line_length":15.5,"max_line_length":38,"alphanum_fraction":0.7956989247} +{"size":7113,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\nCopyright 2016 The Kubernetes Authors All rights reserved.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n\thttp:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n\npackage store\n\nimport (\n\t\"context\"\n\t\"time\"\n\n\t\"github.com\/pkg\/errors\"\n\t\"github.com\/robfig\/cron\/v3\"\n\tbatchv1beta1 \"k8s.io\/api\/batch\/v1beta1\"\n\tmetav1 \"k8s.io\/apimachinery\/pkg\/apis\/meta\/v1\"\n\t\"k8s.io\/apimachinery\/pkg\/runtime\"\n\t\"k8s.io\/apimachinery\/pkg\/watch\"\n\tclientset \"k8s.io\/client-go\/kubernetes\"\n\t\"k8s.io\/client-go\/tools\/cache\"\n\n\t\"github.com\/btwiuse\/k16s\/v2\/pkg\/metric\"\n\tgenerator \"github.com\/btwiuse\/k16s\/v2\/pkg\/metric_generator\"\n)\n\nvar (\n\tdescCronJobLabelsName = \"kube_cronjob_labels\"\n\tdescCronJobLabelsHelp = \"Kubernetes labels converted to Prometheus labels.\"\n\tdescCronJobLabelsDefaultLabels = []string{\"namespace\", \"cronjob\"}\n)\n\nfunc cronJobMetricFamilies(allowLabelsList []string) []generator.FamilyGenerator {\n\treturn []generator.FamilyGenerator{\n\t\t*generator.NewFamilyGenerator(\n\t\t\tdescCronJobLabelsName,\n\t\t\tdescCronJobLabelsHelp,\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tlabelKeys, labelValues := createLabelKeysValues(j.Labels, allowLabelsList)\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: []*metric.Metric{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tLabelKeys: labelKeys,\n\t\t\t\t\t\t\tLabelValues: labelValues,\n\t\t\t\t\t\t\tValue: 
1,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_info\",\n\t\t\t\"Info about cronjob.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: []*metric.Metric{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tLabelKeys: []string{\"schedule\", \"concurrency_policy\"},\n\t\t\t\t\t\t\tLabelValues: []string{j.Spec.Schedule, string(j.Spec.ConcurrencyPolicy)},\n\t\t\t\t\t\t\tValue: 1,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_created\",\n\t\t\t\"Unix creation timestamp\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tms := []*metric.Metric{}\n\t\t\t\tif !j.CreationTimestamp.IsZero() {\n\t\t\t\t\tms = append(ms, &metric.Metric{\n\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\tValue: float64(j.CreationTimestamp.Unix()),\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: ms,\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_status_active\",\n\t\t\t\"Active holds pointers to currently running jobs.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: []*metric.Metric{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\t\tValue: float64(len(j.Status.Active)),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_status_last_schedule_time\",\n\t\t\t\"LastScheduleTime keeps information of when was the last time the job was successfully scheduled.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tms := []*metric.Metric{}\n\n\t\t\t\tif j.Status.LastScheduleTime != nil {\n\t\t\t\t\tms = append(ms, &metric.Metric{\n\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\tValue: float64(j.Status.LastScheduleTime.Unix()),\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: ms,\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_spec_suspend\",\n\t\t\t\"Suspend flag tells the controller to suspend subsequent executions.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tms := []*metric.Metric{}\n\n\t\t\t\tif j.Spec.Suspend != nil {\n\t\t\t\t\tms = append(ms, &metric.Metric{\n\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\tValue: boolFloat64(*j.Spec.Suspend),\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: ms,\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_spec_starting_deadline_seconds\",\n\t\t\t\"Deadline in seconds for starting the job if it misses scheduled time for any reason.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tms := []*metric.Metric{}\n\n\t\t\t\tif j.Spec.StartingDeadlineSeconds != nil {\n\t\t\t\t\tms = append(ms, &metric.Metric{\n\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\tValue: 
float64(*j.Spec.StartingDeadlineSeconds),\n\t\t\t\t\t})\n\n\t\t\t\t}\n\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: ms,\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t\t*generator.NewFamilyGenerator(\n\t\t\t\"kube_cronjob_next_schedule_time\",\n\t\t\t\"Next time the cronjob should be scheduled. The time after lastScheduleTime, or after the cron job's creation time if it's never been scheduled. Use this to determine if the job is delayed.\",\n\t\t\tmetric.Gauge,\n\t\t\t\"\",\n\t\t\twrapCronJobFunc(func(j *batchv1beta1.CronJob) *metric.Family {\n\t\t\t\tms := []*metric.Metric{}\n\n\t\t\t\t\/\/ If the cron job is suspended, don't track the next scheduled time\n\t\t\t\tnextScheduledTime, err := getNextScheduledTime(j.Spec.Schedule, j.Status.LastScheduleTime, j.CreationTimestamp)\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t} else if !*j.Spec.Suspend {\n\t\t\t\t\tms = append(ms, &metric.Metric{\n\t\t\t\t\t\tLabelKeys: []string{},\n\t\t\t\t\t\tLabelValues: []string{},\n\t\t\t\t\t\tValue: float64(nextScheduledTime.Unix()),\n\t\t\t\t\t})\n\t\t\t\t}\n\n\t\t\t\treturn &metric.Family{\n\t\t\t\t\tMetrics: ms,\n\t\t\t\t}\n\t\t\t}),\n\t\t),\n\t}\n}\n\nfunc wrapCronJobFunc(f func(*batchv1beta1.CronJob) *metric.Family) func(interface{}) *metric.Family {\n\treturn func(obj interface{}) *metric.Family {\n\t\tcronJob := obj.(*batchv1beta1.CronJob)\n\n\t\tmetricFamily := f(cronJob)\n\n\t\tfor _, m := range metricFamily.Metrics {\n\t\t\tm.LabelKeys = append(descCronJobLabelsDefaultLabels, m.LabelKeys...)\n\t\t\tm.LabelValues = append([]string{cronJob.Namespace, cronJob.Name}, m.LabelValues...)\n\t\t}\n\n\t\treturn metricFamily\n\t}\n}\n\nfunc createCronJobListWatch(kubeClient clientset.Interface, ns string) cache.ListerWatcher {\n\treturn &cache.ListWatch{\n\t\tListFunc: func(opts metav1.ListOptions) (runtime.Object, error) {\n\t\t\treturn kubeClient.BatchV1beta1().CronJobs(ns).List(context.TODO(), opts)\n\t\t},\n\t\tWatchFunc: func(opts metav1.ListOptions) (watch.Interface, error) {\n\t\t\treturn kubeClient.BatchV1beta1().CronJobs(ns).Watch(context.TODO(), opts)\n\t\t},\n\t}\n}\n\nfunc getNextScheduledTime(schedule string, lastScheduleTime *metav1.Time, createdTime metav1.Time) (time.Time, error) {\n\tsched, err := cron.ParseStandard(schedule)\n\tif err != nil {\n\t\treturn time.Time{}, errors.Wrapf(err, \"Failed to parse cron job schedule '%s'\", schedule)\n\t}\n\tif !lastScheduleTime.IsZero() {\n\t\treturn sched.Next(lastScheduleTime.Time), nil\n\t}\n\tif !createdTime.IsZero() {\n\t\treturn sched.Next(createdTime.Time), nil\n\t}\n\treturn time.Time{}, errors.New(\"createdTime and lastScheduleTime are both zero\")\n}\n","avg_line_length":28.7975708502,"max_line_length":194,"alphanum_fraction":0.6732742865} +{"size":2779,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package jwt\n\nimport (\n\t\"io\/ioutil\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com\/gotoxu\/assert\"\n)\n\nvar hmacTestData = []struct {\n\tname string\n\ttokenString string\n\talg string\n\tclaims map[string]interface{}\n\tvalid bool\n}{\n\t{\n\t\t\"web sample\",\n\t\t\"eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ.AmUus1JoLd_ZBRLipjgE4JYpo708f-3Gwm8q3XrmcAU\",\n\t\t\"HS256\",\n\t\tmap[string]interface{}{\"iss\": \"joe\", \"exp\": 1300819380, \"http:\/\/example.com\/is_root\": 
true},\n\t\ttrue,\n\t},\n\t{\n\t\t\"HS384\",\n\t\t\"eyJhbGciOiJIUzM4NCIsInR5cCI6IkpXVCJ9.eyJleHAiOjEuMzAwODE5MzhlKzA5LCJodHRwOi8vZXhhbXBsZS5jb20vaXNfcm9vdCI6dHJ1ZSwiaXNzIjoiam9lIn0.0R356iZXLpQAlD793UIpuVtqjO_fETDzvIVIoXeTYqyfkjKb-sPq9nVHP216KinG\",\n\t\t\"HS384\",\n\t\tmap[string]interface{}{\"iss\": \"joe\", \"exp\": 1300819380, \"http:\/\/example.com\/is_root\": true},\n\t\ttrue,\n\t},\n\t{\n\t\t\"HS512\",\n\t\t\"eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjEuMzAwODE5MzhlKzA5LCJodHRwOi8vZXhhbXBsZS5jb20vaXNfcm9vdCI6dHJ1ZSwiaXNzIjoiam9lIn0.Y-x6Ld37HgYXEdqfm9b98TsRHH-usSUxK5BEivjiDZYD5-laWBb2KPoqugcBuxVxfE0pJOiAx_oznQVyycWUUw\",\n\t\t\"HS512\",\n\t\tmap[string]interface{}{\"iss\": \"joe\", \"exp\": 1300819380, \"http:\/\/example.com\/is_root\": true},\n\t\ttrue,\n\t},\n\t{\n\t\t\"web sample: invalid\",\n\t\t\"eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ.dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXo\",\n\t\t\"HS256\",\n\t\tmap[string]interface{}{\"iss\": \"joe\", \"exp\": 1300819380, \"http:\/\/example.com\/is_root\": true},\n\t\tfalse,\n\t},\n}\n\nvar hmacTestKey, _ = ioutil.ReadFile(\"test\/hmacTestKey\")\n\nfunc TestHMACSign(t *testing.T) {\n\tfor _, data := range hmacTestData {\n\t\tif data.valid {\n\t\t\tparts := strings.Split(data.tokenString, \".\")\n\t\t\tm := GetSigningMethod(data.alg)\n\t\t\tsig, err := m.Sign(strings.Join(parts[0:2], \".\"), hmacTestKey)\n\t\t\tassert.Nil(t, err)\n\t\t\tassert.DeepEqual(t, sig, parts[2])\n\t\t}\n\t}\n}\n\nfunc TestHMACVerify(t *testing.T) {\n\tfor _, data := range hmacTestData {\n\t\tparts := strings.Split(data.tokenString, \".\")\n\n\t\tm := GetSigningMethod(data.alg)\n\t\terr := m.Verify(strings.Join(parts[0:2], \".\"), parts[2], hmacTestKey)\n\n\t\tif data.valid {\n\t\t\tassert.Nil(t, err)\n\t\t}\n\t\tif !data.valid {\n\t\t\tassert.NotNil(t, err)\n\t\t}\n\t}\n}\n\nfunc BenchmarkHS256Signing(b *testing.B) {\n\tbenchmarkSigning(b, HS256Method, hmacTestKey)\n}\n\nfunc BenchmarkHS384Signing(b *testing.B) {\n\tbenchmarkSigning(b, HS384Method, hmacTestKey)\n}\n\nfunc BenchmarkHS512Signing(b *testing.B) {\n\tbenchmarkSigning(b, HS512Method, hmacTestKey)\n}\n\nfunc benchmarkSigning(b *testing.B, method SigningMethod, key interface{}) {\n\tt := New(method)\n\tb.RunParallel(func(pb *testing.PB) {\n\t\tfor pb.Next() {\n\t\t\tif _, err := t.Generate(key); err != nil {\n\t\t\t\tb.Fatal(err)\n\t\t\t}\n\t\t}\n\t})\n}\n","avg_line_length":27.79,"max_line_length":220,"alphanum_fraction":0.7275998561} +{"size":802,"ext":"go","lang":"Go","max_stars_count":null,"content":"package vecengine\n\nimport (\n\t\"errors\"\n\n\t\"github.com\/TechPay-io\/sirius-base\/hash\"\n\t\"github.com\/TechPay-io\/sirius-base\/inter\/dag\"\n)\n\n\/\/ DfsSubgraph iterates all the event which are observed by head, and accepted by a filter\n\/\/ Excluding head\n\/\/ filter MAY BE called twice for the same event.\nfunc (vi *Engine) DfsSubgraph(head dag.Event, walk func(hash.Event) (godeeper bool)) error {\n\tstack := make(hash.EventsStack, 0, vi.validators.Len()*5)\n\n\t\/\/ first element\n\tstack.PushAll(head.Parents())\n\n\tfor next := stack.Pop(); next != nil; next = stack.Pop() {\n\t\tcurr := *next\n\n\t\t\/\/ filter\n\t\tif !walk(curr) {\n\t\t\tcontinue\n\t\t}\n\n\t\tevent := vi.getEvent(curr)\n\t\tif event == nil {\n\t\t\treturn errors.New(\"event not found \" + curr.String())\n\t\t}\n\n\t\t\/\/ memorize parents\n\t\tstack.PushAll(event.Parents())\n\t}\n\n\treturn 
nil\n}\n","avg_line_length":21.1052631579,"max_line_length":92,"alphanum_fraction":0.6783042394} +{"size":2227,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"flag\"\n\t\"os\"\n\t\"path\/filepath\"\n\t\"time\"\n\n\t\"k8s.io\/client-go\/util\/homedir\"\n\n\t\"github.com\/slok\/service-level-operator\/pkg\/operator\"\n)\n\n\/\/ defaults\nconst (\n\tdefMetricsPath = \"\/metrics\"\n\tdefListenAddress = \":8080\"\n\tdefResyncSeconds = 5\n\tdefWorkers = 10\n)\n\ntype cmdFlags struct {\n\tfs *flag.FlagSet\n\n\tkubeConfig string\n\tresyncSeconds int\n\tworkers int\n\tmetricsPath string\n\tlistenAddress string\n\tlabelSelector string\n\tnamespace string\n\tdebug bool\n\tdevelopment bool\n\tfake bool\n}\n\nfunc newCmdFlags() *cmdFlags {\n\tc := &cmdFlags{\n\t\tfs: flag.NewFlagSet(os.Args[0], flag.ExitOnError),\n\t}\n\tc.init()\n\n\treturn c\n}\n\nfunc (c *cmdFlags) init() {\n\n\tkubehome := filepath.Join(homedir.HomeDir(), \".kube\", \"config\")\n\t\/\/ register flags\n\tc.fs.StringVar(&c.kubeConfig, \"kubeconfig\", kubehome, \"kubernetes configuration path, only used when development mode enabled\")\n\tc.fs.StringVar(&c.metricsPath, \"metrics-path\", defMetricsPath, \"the path where the metrics will be served\")\n\tc.fs.StringVar(&c.listenAddress, \"listen-addr\", defListenAddress, \"the address where the metrics will be exposed\")\n\tc.fs.StringVar(&c.labelSelector, \"selector\", \"\", \"selector (label query) to filter on, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)\")\n\tc.fs.StringVar(&c.namespace, \"namespace\", \"\", \"the namespace to filter on, by default all\")\n\tc.fs.IntVar(&c.resyncSeconds, \"resync-seconds\", defResyncSeconds, \"the number of seconds for the SLO calculation interval\")\n\tc.fs.IntVar(&c.workers, \"workers\", defWorkers, \"the number of concurrent workers per controller handling events\")\n\tc.fs.BoolVar(&c.development, \"development\", false, \"development flag will allow to run the operator outside a kubernetes cluster\")\n\tc.fs.BoolVar(&c.debug, \"debug\", false, \"enable debug mode\")\n\tc.fs.BoolVar(&c.fake, \"fake\", false, \"enable faked mode, in faked node external services\/dependencies are not needed\")\n\n\t\/\/ Parse flags\n\tc.fs.Parse(os.Args[1:])\n}\n\nfunc (c *cmdFlags) toOperatorConfig() operator.Config {\n\treturn operator.Config{\n\t\tResyncPeriod: time.Duration(c.resyncSeconds) * time.Second,\n\t\tConcurretWorkers: c.workers,\n\t\tLabelSelector: c.labelSelector,\n\t\tNamespace: c.namespace,\n\t}\n}\n","avg_line_length":30.5068493151,"max_line_length":152,"alphanum_fraction":0.7112707678} +{"size":1234,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"encoding\/json\"\n\t\"fmt\"\n\t\"log\"\n\t\"net\/http\"\n\t\"time\"\n)\n\n\/\/ JSON Writes the interface to the wire\nfunc JSON(writer http.ResponseWriter, v interface{}) {\n\tif body, err := json.Marshal(v); err != nil {\n\t\tServerError(writer, err)\n\t} else {\n\t\twriter.Write(body)\n\t}\n}\n\n\/\/ JSONB Just the bytes mam\nfunc JSONB(v interface{}) []byte {\n\tb, _ := json.Marshal(v)\n\treturn b\n}\n\n\/\/ NotFoundError Writer 404 {error: not fount}\nfunc NotFoundError(writer http.ResponseWriter, kind string, key int64, err error) {\n\tlog.Printf(\"Error finding %s:%d %s\\n\", kind, key, err)\n\twriter.WriteHeader(http.StatusNotFound)\n\twriter.Write(JSONB(map[string]error{\"error\": err}))\n}\n\nfunc BadRequest(writer http.ResponseWriter, err error) {\n\tlog.Println(\"Bad request:\", 
err)\n\twriter.WriteHeader(400)\n\twriter.Write(JSONB(map[string]error{\"error\": err}))\n}\n\nfunc ServerError(writer http.ResponseWriter, err error) {\n\tfmt.Println(\"Error:\", err)\n\twriter.WriteHeader(500)\n}\n\nfunc Unauthorized(writer http.ResponseWriter, v interface{}) {\n\twriter.WriteHeader(http.StatusUnauthorized)\n}\n\nfunc ParseTime(timeString string) time.Time {\n\tt, err := time.Parse(time.RFC3339, timeString)\n\tif err != nil {\n\t\tlog.Println(\"Error parsing time:\", err)\n\t}\n\treturn t\n}\n","avg_line_length":22.4363636364,"max_line_length":83,"alphanum_fraction":0.709076175} +{"size":67783,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright (c) 2015-2017 The Decred developers\n\/\/ Copyright (c) 2018-2020 The Hc developers\n\/\/ Use of this source code is governed by an ISC\n\/\/ license that can be found in the LICENSE file.\n\npackage stake_test\n\nimport (\n\t\"bytes\"\n\t\"encoding\/hex\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com\/nbit99\/hcd\/blockchain\/stake\"\n\t\"github.com\/nbit99\/hcd\/chaincfg\"\n\t\"github.com\/nbit99\/hcd\/chaincfg\/chainhash\"\n\t\"github.com\/nbit99\/hcd\/hcutil\"\n\t\"github.com\/nbit99\/hcd\/txscript\"\n\t\"github.com\/nbit99\/hcd\/wire\"\n)\n\n\/\/ SSTX TESTING -------------------------------------------------------------------\nfunc TestIsSStx(t *testing.T) {\n\tvar sstx = hcutil.NewTx(sstxMsgTx)\n\tsstx.SetTree(wire.TxTreeStake)\n\tsstx.SetIndex(0)\n\n\ttest, err := stake.IsSStx(sstx.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSTx should have returned true, but instead returned %v\"+\n\t\t\t\",%v\", test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an OP_RETURN commitment push of the maximum size\n\tbiggestPush := []byte{\n\t\t0x6a, 0x4b, \/\/ OP_RETURN Push 75-bytes\n\t\t0x14, 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, \/\/ 75 bytes\n\t\t0x3f, 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde,\n\t}\n\n\tsstx = hcutil.NewTxDeep(sstxMsgTx)\n\tsstx.MsgTx().TxOut[1].PkScript = biggestPush\n\tsstx.SetTree(wire.TxTreeStake)\n\tsstx.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstx.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSTx should have returned true, but instead returned %v\"+\n\t\t\t\",%v\", test, err)\n\t}\n}\n\nfunc TestIsSSTxErrors(t *testing.T) {\n\t\/\/ Initialize the buffer for later manipulation\n\tvar buf bytes.Buffer\n\tbuf.Grow(sstxMsgTx.SerializeSize())\n\terr := sstxMsgTx.Serialize(&buf)\n\tif err != nil {\n\t\tt.Errorf(\"Error serializing the reference sstx: %v\", err)\n\t}\n\tbufBytes := buf.Bytes()\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many inputs with sstxMsgTxExtraInputs\n\n\tvar sstxExtraInputs = hcutil.NewTx(sstxMsgTxExtraInput)\n\tsstxExtraInputs.SetTree(wire.TxTreeStake)\n\tsstxExtraInputs.SetIndex(0)\n\n\ttest, err := stake.IsSStx(sstxExtraInputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxTooManyInputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", 
stake.ErrSStxTooManyInputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many outputs with sstxMsgTxExtraOutputs\n\n\tvar sstxExtraOutputs = hcutil.NewTx(sstxMsgTxExtraOutputs)\n\tsstxExtraOutputs.SetTree(wire.TxTreeStake)\n\tsstxExtraOutputs.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxExtraOutputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxTooManyOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxTooManyOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Check to make sure the first output is OP_SSTX tagged\n\n\tvar tx wire.MsgTx\n\ttestFirstOutTagged := bytes.Replace(bufBytes,\n\t\t[]byte{0x00, 0xe3, 0x23, 0x21, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x1a, 0xba},\n\t\t[]byte{0x00, 0xe3, 0x23, 0x21, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x19},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf := bytes.NewReader(testFirstOutTagged)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar sstxUntaggedOut = hcutil.NewTx(&tx)\n\tsstxUntaggedOut.SetTree(wire.TxTreeStake)\n\tsstxUntaggedOut.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxUntaggedOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for mismatched number of inputs versus number of outputs\n\n\tvar sstxInsOutsMismatched = hcutil.NewTx(sstxMismatchedInsOuts)\n\tsstxInsOutsMismatched.SetTree(wire.TxTreeStake)\n\tsstxInsOutsMismatched.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxInsOutsMismatched.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInOutProportions {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInOutProportions, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for bad version of output.\n\tvar sstxBadVerOut = hcutil.NewTx(sstxBadVersionOut)\n\tsstxBadVerOut.SetTree(wire.TxTreeStake)\n\tsstxBadVerOut.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxBadVerOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for second or more output not being OP_RETURN push\n\n\tvar sstxNoNullData = hcutil.NewTx(sstxNullDataMissing)\n\tsstxNoNullData.SetTree(wire.TxTreeStake)\n\tsstxNoNullData.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxNoNullData.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for change output being in the wrong place\n\n\tvar sstxNullDataMis = 
hcutil.NewTx(sstxNullDataMisplaced)\n\tsstxNullDataMis.SetTree(wire.TxTreeStake)\n\tsstxNullDataMis.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxNullDataMis.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for too short of a pubkeyhash being given in an OP_RETURN output\n\n\ttestPKHLength := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x20, 0x6a, 0x1e, 0x94, 0x8c, 0x76, 0x5a, 0x69,\n\t\t\t0x14, 0xd4, 0x3f, 0x2a, 0x7a, 0xc1, 0x77, 0xda,\n\t\t\t0x2c, 0x2f, 0x6b, 0x52, 0xde, 0x3d, 0x7c,\n\t\t},\n\t\t[]byte{\n\t\t\t0x1f, 0x6a, 0x1d, 0x94, 0x8c, 0x76, 0x5a, 0x69,\n\t\t\t0x14, 0xd4, 0x3f, 0x2a, 0x7a, 0xc1, 0x77, 0xda,\n\t\t\t0x2c, 0x2f, 0x6b, 0x52, 0xde, 0x3d,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testPKHLength)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar sstxWrongPKHLength = hcutil.NewTx(&tx)\n\tsstxWrongPKHLength.SetTree(wire.TxTreeStake)\n\tsstxWrongPKHLength.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxWrongPKHLength.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an invalid OP_RETURN prefix with too big of a push\n\ttooBigPush := []byte{\n\t\t0x6a, 0x4c, 0x4c, \/\/ OP_RETURN Push 76-bytes\n\t\t0x14, 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, \/\/ 76 bytes\n\t\t0x3f, 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d,\n\t}\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(bufBytes)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\ttx.TxOut[1].PkScript = tooBigPush\n\n\tvar sstxWrongPrefix = hcutil.NewTx(&tx)\n\tsstxWrongPrefix.SetTree(wire.TxTreeStake)\n\tsstxWrongPrefix.SetIndex(0)\n\n\ttest, err = stake.IsSStx(sstxWrongPrefix.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxInvalidOutputs {\n\t\tt.Errorf(\"IsSSTx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSStxInvalidOutputs, test, err)\n\t}\n}\n\n\/\/ SSGEN TESTING ------------------------------------------------------------------\n\nfunc TestIsSSGen(t *testing.T) {\n\tvar ssgen = hcutil.NewTx(ssgenMsgTx)\n\tssgen.SetTree(wire.TxTreeStake)\n\tssgen.SetIndex(0)\n\n\ttest, err := stake.IsSSGen(ssgen.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSGen should have returned true, but instead returned %v\"+\n\t\t\t\",%v\", test, err)\n\t}\n\n\t\/\/ Test for an OP_RETURN VoteBits push of the maximum size\n\tbiggestPush := []byte{\n\t\t0x6a, 0x4b, \/\/ OP_RETURN Push 75-bytes\n\t\t0x14, 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, \/\/ 75 bytes\n\t\t0x3f, 
0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c,\n\t\t0x6b, 0x52, 0xde,\n\t}\n\n\tssgen = hcutil.NewTxDeep(ssgenMsgTx)\n\tssgen.SetTree(wire.TxTreeStake)\n\tssgen.SetIndex(0)\n\tssgen.MsgTx().TxOut[1].PkScript = biggestPush\n\n\ttest, err = stake.IsSSGen(ssgen.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSGen should have returned true, but instead returned %v\"+\n\t\t\t\",%v\", test, err)\n\t}\n\n}\n\nfunc TestIsSSGenErrors(t *testing.T) {\n\t\/\/ Initialize the buffer for later manipulation\n\tvar buf bytes.Buffer\n\tbuf.Grow(ssgenMsgTx.SerializeSize())\n\terr := ssgenMsgTx.Serialize(&buf)\n\tif err != nil {\n\t\tt.Errorf(\"Error serializing the reference sstx: %v\", err)\n\t}\n\tbufBytes := buf.Bytes()\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many inputs with ssgenMsgTxExtraInputs\n\n\tvar ssgenExtraInputs = hcutil.NewTx(ssgenMsgTxExtraInput)\n\tssgenExtraInputs.SetTree(wire.TxTreeStake)\n\tssgenExtraInputs.SetIndex(0)\n\n\ttest, err := stake.IsSSGen(ssgenExtraInputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenWrongNumInputs {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenWrongNumInputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many outputs with sstxMsgTxExtraOutputs\n\n\tvar ssgenExtraOutputs = hcutil.NewTx(ssgenMsgTxExtraOutputs)\n\tssgenExtraOutputs.SetTree(wire.TxTreeStake)\n\tssgenExtraOutputs.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenExtraOutputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenTooManyOutputs {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenTooManyOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test 0th input not being stakebase error\n\n\tvar ssgenStakeBaseWrong = hcutil.NewTx(ssgenMsgTxStakeBaseWrong)\n\tssgenStakeBaseWrong.SetTree(wire.TxTreeStake)\n\tssgenStakeBaseWrong.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenStakeBaseWrong.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenNoStakebase {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenNoStakebase, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Wrong tree for inputs test\n\n\t\/\/ Replace TxTreeStake with TxTreeRegular\n\ttestWrongTreeInputs := bytes.Replace(bufBytes,\n\t\t[]byte{0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x01},\n\t\t[]byte{0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\tvar tx wire.MsgTx\n\trbuf := bytes.NewReader(testWrongTreeInputs)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenWrongTreeIns = 
hcutil.NewTx(&tx)\n\tssgenWrongTreeIns.SetTree(wire.TxTreeStake)\n\tssgenWrongTreeIns.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongTreeIns.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenWrongTxTree {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenWrongTxTree, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for bad version of output.\n\tvar ssgenTxBadVerOut = hcutil.NewTx(ssgenMsgTxBadVerOut)\n\tssgenTxBadVerOut.SetTree(wire.TxTreeStake)\n\tssgenTxBadVerOut.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenTxBadVerOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadGenOuts {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadGenOuts, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test 0th output not being OP_RETURN push\n\n\tvar ssgenWrongZeroethOut = hcutil.NewTx(ssgenMsgTxWrongZeroethOut)\n\tssgenWrongZeroethOut.SetTree(wire.TxTreeStake)\n\tssgenWrongZeroethOut.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongZeroethOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenNoReference {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenNoReference, test, err)\n\t}\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for too short of an OP_RETURN push being given in the 0th tx out\n\n\ttestDataPush0Length := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x26, 0x6a, 0x24,\n\t\t\t0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f,\n\t\t\t0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0x00, 0xe3, 0x23, 0x21,\n\t\t},\n\t\t[]byte{\n\t\t\t0x25, 0x6a, 0x23,\n\t\t\t0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f,\n\t\t\t0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0x00, 0xe3, 0x23,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testDataPush0Length)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenWrongDataPush0Length = hcutil.NewTx(&tx)\n\tssgenWrongDataPush0Length.SetTree(wire.TxTreeStake)\n\tssgenWrongDataPush0Length.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongDataPush0Length.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadReference {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadReference, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an invalid OP_RETURN prefix\n\n\ttestNullData0Prefix := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x26, 0x6a, 0x24,\n\t\t\t0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f,\n\t\t\t0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0x00, 0xe3, 0x23, 0x21,\n\t\t},\n\t\t[]byte{ \/\/ This uses an OP_PUSHDATA_1 35-byte push to achieve 36 bytes\n\t\t\t0x26, 0x6a, 0x4c, 0x23,\n\t\t\t0x94, 0x8c, 0x76, 0x5a, 
0x69, 0x14, 0xd4, 0x3f,\n\t\t\t0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c, 0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0x00, 0xe3, 0x23,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testNullData0Prefix)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenWrongNullData0Prefix = hcutil.NewTx(&tx)\n\tssgenWrongNullData0Prefix.SetTree(wire.TxTreeStake)\n\tssgenWrongNullData0Prefix.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongNullData0Prefix.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadReference {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadReference, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test 1st output not being OP_RETURN push\n\n\tvar ssgenWrongFirstOut = hcutil.NewTx(ssgenMsgTxWrongFirstOut)\n\tssgenWrongFirstOut.SetTree(wire.TxTreeStake)\n\tssgenWrongFirstOut.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongFirstOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenNoVotePush {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenNoVotePush, test, err)\n\t}\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for too short of an OP_RETURN push being given in the 1st tx out\n\ttestDataPush1Length := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x04, 0x6a, 0x02, 0x94, 0x8c,\n\t\t},\n\t\t[]byte{\n\t\t\t0x03, 0x6a, 0x01, 0x94,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testDataPush1Length)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenWrongDataPush1Length = hcutil.NewTx(&tx)\n\tssgenWrongDataPush1Length.SetTree(wire.TxTreeStake)\n\tssgenWrongDataPush1Length.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongDataPush1Length.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadVotePush {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadVotePush, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for longer OP_RETURN push being given in the 1st tx out\n\ttestDataPush1Length = bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x04, 0x6a, 0x02, 0x94, 0x8c,\n\t\t},\n\t\t[]byte{\n\t\t\t0x06, 0x6a, 0x04, 0x94, 0x8c, 0x8c, 0x8c,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testDataPush1Length)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenLongDataPush1Length = hcutil.NewTx(&tx)\n\tssgenLongDataPush1Length.SetTree(wire.TxTreeStake)\n\tssgenLongDataPush1Length.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenLongDataPush1Length.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadVotePush, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an invalid OP_RETURN prefix\n\n\ttestNullData1Prefix := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x04, 0x6a, 0x02, 0x94, 0x8c,\n\t\t},\n\t\t[]byte{ \/\/ This uses 
an OP_PUSHDATA_1 2-byte push to do the push in 5 bytes\n\t\t\t0x05, 0x6a, 0x4c, 0x02, 0x00, 0x00,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testNullData1Prefix)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgenWrongNullData1Prefix = hcutil.NewTx(&tx)\n\tssgenWrongNullData1Prefix.SetTree(wire.TxTreeStake)\n\tssgenWrongNullData1Prefix.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgenWrongNullData1Prefix.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadVotePush {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadVotePush, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an index 2+ output being not OP_SSGEN tagged\n\n\ttestGenOutputUntagged := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x1a, 0xbb, 0x76, 0xa9, 0x14, 0xc3, 0x98,\n\t\t},\n\t\t[]byte{\n\t\t\t0x19, 0x76, 0xa9, 0x14, 0xc3, 0x98,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testGenOutputUntagged)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssgentestGenOutputUntagged = hcutil.NewTx(&tx)\n\tssgentestGenOutputUntagged.SetTree(wire.TxTreeStake)\n\tssgentestGenOutputUntagged.SetIndex(0)\n\n\ttest, err = stake.IsSSGen(ssgentestGenOutputUntagged.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSGenBadGenOuts {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenBadGenOuts, test, err)\n\t}\n}\n\n\/\/ SSRTX TESTING ------------------------------------------------------------------\n\nfunc TestIsSSRtx(t *testing.T) {\n\tvar ssrtx = hcutil.NewTx(ssrtxMsgTx)\n\tssrtx.SetTree(wire.TxTreeStake)\n\tssrtx.SetIndex(0)\n\n\ttest, err := stake.IsSSRtx(ssrtx.MsgTx())\n\tif !test || err != nil {\n\t\tt.Errorf(\"IsSSRtx should have returned true, but instead returned %v\"+\n\t\t\t\",%v\", test, err)\n\t}\n}\n\nfunc TestIsSSRtxErrors(t *testing.T) {\n\t\/\/ Initialize the buffer for later manipulation\n\tvar buf bytes.Buffer\n\tbuf.Grow(ssrtxMsgTx.SerializeSize())\n\terr := ssrtxMsgTx.Serialize(&buf)\n\tif err != nil {\n\t\tt.Errorf(\"Error serializing the reference sstx: %v\", err)\n\t}\n\tbufBytes := buf.Bytes()\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many inputs with ssrtxMsgTxTooManyInputs\n\n\tvar ssrtxTooManyInputs = hcutil.NewTx(ssrtxMsgTxTooManyInputs)\n\tssrtxTooManyInputs.SetTree(wire.TxTreeStake)\n\tssrtxTooManyInputs.SetIndex(0)\n\n\ttest, err := stake.IsSSRtx(ssrtxTooManyInputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSRtxWrongNumInputs {\n\t\tt.Errorf(\"IsSSRtx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSRtxWrongNumInputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test too many outputs with ssrtxMsgTxTooManyOutputs\n\n\tvar ssrtxTooManyOutputs = hcutil.NewTx(ssrtxMsgTxTooManyOutputs)\n\tssrtxTooManyOutputs.SetTree(wire.TxTreeStake)\n\tssrtxTooManyOutputs.SetIndex(0)\n\n\ttest, err = stake.IsSSRtx(ssrtxTooManyOutputs.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSRtxTooManyOutputs {\n\t\tt.Errorf(\"IsSSRtx should have returned false,%v but instead returned 
%v\"+\n\t\t\t\",%v\", stake.ErrSSRtxTooManyOutputs, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for bad version of output.\n\tvar ssrtxTxBadVerOut = hcutil.NewTx(ssrtxMsgTxBadVerOut)\n\tssrtxTxBadVerOut.SetTree(wire.TxTreeStake)\n\tssrtxTxBadVerOut.SetIndex(0)\n\n\ttest, err = stake.IsSSRtx(ssrtxTxBadVerOut.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSRtxBadOuts {\n\t\tt.Errorf(\"IsSSRtx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSRtxBadOuts, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Test for an index 0+ output being not OP_SSRTX tagged\n\ttestRevocOutputUntagged := bytes.Replace(bufBytes,\n\t\t[]byte{\n\t\t\t0x1a, 0xbc, 0x76, 0xa9, 0x14, 0xc3, 0x98,\n\t\t},\n\t\t[]byte{\n\t\t\t0x19, 0x76, 0xa9, 0x14, 0xc3, 0x98,\n\t\t},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\tvar tx wire.MsgTx\n\trbuf := bytes.NewReader(testRevocOutputUntagged)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssrtxTestRevocOutputUntagged = hcutil.NewTx(&tx)\n\tssrtxTestRevocOutputUntagged.SetTree(wire.TxTreeStake)\n\tssrtxTestRevocOutputUntagged.SetIndex(0)\n\n\ttest, err = stake.IsSSRtx(ssrtxTestRevocOutputUntagged.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSRtxBadOuts {\n\t\tt.Errorf(\"IsSSGen should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSRtxBadOuts, test, err)\n\t}\n\n\t\/\/ ---------------------------------------------------------------------------\n\t\/\/ Wrong tree for inputs test\n\n\t\/\/ Replace TxTreeStake with TxTreeRegular\n\ttestWrongTreeInputs := bytes.Replace(bufBytes,\n\t\t[]byte{0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x01},\n\t\t[]byte{0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00},\n\t\t1)\n\n\t\/\/ Deserialize the manipulated tx\n\trbuf = bytes.NewReader(testWrongTreeInputs)\n\terr = tx.Deserialize(rbuf)\n\tif err != nil {\n\t\tt.Errorf(\"Deserialize error %v\", err)\n\t}\n\n\tvar ssrtxWrongTreeIns = hcutil.NewTx(&tx)\n\tssrtxWrongTreeIns.SetTree(wire.TxTreeStake)\n\tssrtxWrongTreeIns.SetIndex(0)\n\n\ttest, err = stake.IsSSRtx(ssrtxWrongTreeIns.MsgTx())\n\tif test || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSSRtxWrongTxTree {\n\t\tt.Errorf(\"IsSSRtx should have returned false,%v but instead returned %v\"+\n\t\t\t\",%v\", stake.ErrSSGenWrongTxTree, test, err)\n\t}\n}\n\n\/\/ --------------------------------------------------------------------------------\n\/\/ Minor function testing\nfunc TestGetSSGenBlockVotedOn(t *testing.T) {\n\tvar ssgen = hcutil.NewTx(ssgenMsgTx)\n\tssgen.SetTree(wire.TxTreeStake)\n\tssgen.SetIndex(0)\n\n\tblockHash, height, err := stake.SSGenBlockVotedOn(ssgen.MsgTx())\n\n\tcorrectBlockHash, _ := chainhash.NewHash(\n\t\t[]byte{\n\t\t\t0x94, 0x8c, 0x76, 0x5a, \/\/ 32 byte hash\n\t\t\t0x69, 0x14, 0xd4, 0x3f,\n\t\t\t0x2a, 0x7a, 0xc1, 0x77,\n\t\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t})\n\n\tcorrectheight := uint32(0x2123e300)\n\n\tif err != nil {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenBlockVotedOn: %v\", err)\n\t}\n\n\tif !reflect.DeepEqual(blockHash, *correctBlockHash) {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenBlockVotedOn: Looking for 
\"+\n\t\t\t\"hash %v, got hash %v: %v\", *correctBlockHash, blockHash, err)\n\t}\n\n\tif height != correctheight {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenBlockVotedOn: Looking for \"+\n\t\t\t\"height %v, got height %v: %v\", correctheight, height, err)\n\t}\n}\n\nfunc TestGetSStxStakeOutputInfo(t *testing.T) {\n\tvar sstx = hcutil.NewTx(sstxMsgTx)\n\tsstx.SetTree(wire.TxTreeStake)\n\tsstx.SetIndex(0)\n\n\tcorrectTyp := true\n\n\tcorrectPkh := []byte{0x94, 0x8c, 0x76, 0x5a, \/\/ 20 byte address\n\t\t0x69, 0x14, 0xd4, 0x3f,\n\t\t0x2a, 0x7a, 0xc1, 0x77,\n\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t}\n\n\tcorrectAmt := int64(0x2123e300)\n\n\tcorrectChange := int64(0x2223e300)\n\n\tcorrectRule := true\n\n\tcorrectLimit := uint16(4)\n\n\ttyps, pkhs, amts, changeAmts, rules, limits, _ :=\n\t\tstake.TxSStxStakeOutputInfo(sstx.MsgTx())\n\n\tif typs[2] != correctTyp {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"type %v, got type %v\", correctTyp, typs[1])\n\t}\n\n\tif !reflect.DeepEqual(pkhs[1], correctPkh) {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"pkh %v, got pkh %v\", correctPkh, pkhs[1])\n\t}\n\n\tif amts[1] != correctAmt {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"amount %v, got amount %v\", correctAmt, amts[1])\n\t}\n\n\tif changeAmts[1] != correctChange {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"amount %v, got amount %v\", correctChange, changeAmts[1])\n\t}\n\n\tif rules[1][0] != correctRule {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"rule %v, got rule %v\", correctRule, rules[1][0])\n\t}\n\n\tif limits[1][0] != correctLimit {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"limit %v, got limit %v\", correctLimit, rules[1][0])\n\t}\n}\n\nfunc TestGetSSGenStakeOutputInfo(t *testing.T) {\n\tvar ssgen = hcutil.NewTx(ssgenMsgTx)\n\tssgen.SetTree(wire.TxTreeStake)\n\tssgen.SetIndex(0)\n\n\tcorrectTyp := false\n\n\tcorrectpkh := []byte{0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t}\n\n\tcorrectamt := int64(0x2123e300)\n\n\ttyps, pkhs, amts, err := stake.TxSSGenStakeOutputInfo(ssgen.MsgTx(),\n\t\t&chaincfg.SimNetParams)\n\tif err != nil {\n\t\tt.Errorf(\"Got unexpected error: %v\", err.Error())\n\t}\n\n\tif typs[0] != correctTyp {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenStakeOutputInfo: Looking for \"+\n\t\t\t\"type %v, got type %v\", correctamt, amts[0])\n\t}\n\n\tif !reflect.DeepEqual(pkhs[0], correctpkh) {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenStakeOutputInfo: Looking for \"+\n\t\t\t\"pkh %v, got pkh %v\", correctpkh, pkhs[0])\n\t}\n\n\tif amts[0] != correctamt {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenStakeOutputInfo: Looking for \"+\n\t\t\t\"amount %v, got amount %v\", correctamt, amts[0])\n\t}\n}\n\nfunc TestGetSSGenVoteBits(t *testing.T) {\n\tvar ssgen = hcutil.NewTx(ssgenMsgTx)\n\tssgen.SetTree(wire.TxTreeStake)\n\tssgen.SetIndex(0)\n\n\tcorrectvbs := uint16(0x8c94)\n\n\tvotebits := stake.SSGenVoteBits(ssgen.MsgTx())\n\n\tif correctvbs != votebits {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenVoteBits: Looking for \"+\n\t\t\t\"vbs % x, got vbs % x\", correctvbs, votebits)\n\t}\n}\n\nfunc TestGetSSGenVersion(t *testing.T) {\n\tvar ssgen = ssgenMsgTx.Copy()\n\n\tmissingVersion := 
uint32(stake.VoteConsensusVersionAbsent)\n\tversion := stake.SSGenVersion(ssgen)\n\tif version != missingVersion {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenVersion: Looking for \"+\n\t\t\t\"version % x, got version % x\", missingVersion, version)\n\t}\n\n\tvbBytes := []byte{0x01, 0x00, 0x01, 0xef, 0xcd, 0xab}\n\texpectedVersion := uint32(0xabcdef01)\n\tpkScript, err := txscript.GenerateProvablyPruneableOut(vbBytes)\n\tif err != nil {\n\t\tt.Errorf(\"GenerateProvablyPruneableOut error %v\", err)\n\t}\n\tssgen.TxOut[1].PkScript = pkScript\n\tversion = stake.SSGenVersion(ssgen)\n\n\tif version != expectedVersion {\n\t\tt.Errorf(\"Error thrown on TestGetSSGenVersion: Looking for \"+\n\t\t\t\"version % x, got version % x\", expectedVersion, version)\n\t}\n}\n\nfunc TestGetSSRtxStakeOutputInfo(t *testing.T) {\n\tvar ssrtx = hcutil.NewTx(ssrtxMsgTx)\n\tssrtx.SetTree(wire.TxTreeStake)\n\tssrtx.SetIndex(0)\n\n\tcorrectTyp := false\n\n\tcorrectPkh := []byte{0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x33,\n\t}\n\n\tcorrectAmt := int64(0x2122e300)\n\n\ttyps, pkhs, amts, err := stake.TxSSRtxStakeOutputInfo(ssrtx.MsgTx(),\n\t\t&chaincfg.SimNetParams)\n\tif err != nil {\n\t\tt.Errorf(\"Got unexpected error: %v\", err.Error())\n\t}\n\n\tif typs[0] != correctTyp {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"type %v, got type %v\", correctTyp, typs[0])\n\t}\n\n\tif !reflect.DeepEqual(pkhs[0], correctPkh) {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"pkh %v, got pkh %v\", correctPkh, pkhs[0])\n\t}\n\n\tif amts[0] != correctAmt {\n\t\tt.Errorf(\"Error thrown on TestGetSStxStakeOutputInfo: Looking for \"+\n\t\t\t\"amount %v, got amount %v\", correctAmt, amts[0])\n\t}\n}\n\nfunc TestGetSStxNullOutputAmounts(t *testing.T) {\n\tcommitAmts := []int64{int64(0x2122e300),\n\t\tint64(0x12000000),\n\t\tint64(0x12300000)}\n\tchangeAmts := []int64{int64(0x0122e300),\n\t\tint64(0x02000000),\n\t\tint64(0x02300000)}\n\tamtTicket := int64(0x9122e300)\n\n\t_, _, err := stake.SStxNullOutputAmounts(\n\t\t[]int64{\n\t\t\tint64(0x12000000),\n\t\t\tint64(0x12300000),\n\t\t},\n\t\tchangeAmts,\n\t\tamtTicket)\n\n\t\/\/ len commit to amts != len change amts\n\tlenErrStr := \"amounts was not equal in length \" +\n\t\t\"to change amounts!\"\n\tif err == nil || err.Error() != lenErrStr {\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ too small amount to commit\n\t_, _, err = stake.SStxNullOutputAmounts(\n\t\tcommitAmts,\n\t\tchangeAmts,\n\t\tint64(0x00000000))\n\ttooSmallErrStr := \"committed amount was too small!\"\n\tif err == nil || err.Error() != tooSmallErrStr {\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ overspending error\n\ttooMuchChangeAmts := []int64{int64(0x0122e300),\n\t\tint64(0x02000000),\n\t\tint64(0x12300001)}\n\n\t_, _, err = stake.SStxNullOutputAmounts(\n\t\tcommitAmts,\n\t\ttooMuchChangeAmts,\n\t\tint64(0x00000020))\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrSStxBadChangeAmts {\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts unexpected error: %v\", err)\n\t}\n\n\tfees, amts, err := stake.SStxNullOutputAmounts(commitAmts,\n\t\tchangeAmts,\n\t\tamtTicket)\n\n\tif err != nil {\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts unexpected error: %v\", err)\n\t}\n\n\texpectedFees := int64(-1361240832)\n\n\tif expectedFees != fees 
{\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts error, wanted %v, \"+\n\t\t\t\"but got %v\", expectedFees, fees)\n\t}\n\n\texpectedAmts := []int64{int64(0x20000000),\n\t\tint64(0x10000000),\n\t\tint64(0x10000000),\n\t}\n\n\tif !reflect.DeepEqual(expectedAmts, amts) {\n\t\tt.Errorf(\"TestGetSStxNullOutputAmounts error, wanted %v, \"+\n\t\t\t\"but got %v\", expectedAmts, amts)\n\t}\n}\n\nfunc TestGetStakeRewards(t *testing.T) {\n\t\/\/ SSGen example with >0 subsidy\n\tamounts := []int64{int64(21000000),\n\t\tint64(11000000),\n\t\tint64(10000000),\n\t}\n\tamountTicket := int64(42000000)\n\tsubsidy := int64(400000)\n\n\toutAmts := stake.CalculateRewards(amounts, amountTicket, subsidy)\n\n\t\/\/ SSRtx example with 0 subsidy\n\texpectedAmts := []int64{int64(21200000),\n\t\tint64(11104761),\n\t\tint64(10095238),\n\t}\n\n\tif !reflect.DeepEqual(expectedAmts, outAmts) {\n\t\tt.Errorf(\"TestGetStakeRewards error, wanted %v, \"+\n\t\t\t\"but got %v\", expectedAmts, outAmts)\n\t}\n}\n\nfunc TestVerifySStxAmounts(t *testing.T) {\n\tamounts := []int64{int64(21000000),\n\t\tint64(11000000),\n\t\tint64(10000000),\n\t}\n\tcalcAmounts := []int64{int64(21000000),\n\t\tint64(11000000),\n\t\tint64(10000000),\n\t}\n\n\t\/\/ len error for slices\n\tcalcAmountsBad := []int64{int64(11000000),\n\t\tint64(10000000),\n\t}\n\terr := stake.VerifySStxAmounts(amounts,\n\t\tcalcAmountsBad)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerSStxAmts {\n\t\tt.Errorf(\"TestVerifySStxAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ non-congruent slices error\n\tcalcAmountsBad = []int64{int64(21000000),\n\t\tint64(11000000),\n\t\tint64(10000001),\n\t}\n\terr = stake.VerifySStxAmounts(amounts,\n\t\tcalcAmountsBad)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerSStxAmts {\n\t\tt.Errorf(\"TestVerifySStxAmounts unexpected error: %v\", err)\n\t}\n\n\terr = stake.VerifySStxAmounts(amounts,\n\t\tcalcAmounts)\n\tif err != nil {\n\t\tt.Errorf(\"TestVerifySStxAmounts unexpected error: %v\", err)\n\t}\n}\n\nfunc TestVerifyStakingPkhsAndAmounts(t *testing.T) {\n\ttypes := []bool{false, false}\n\tamounts := []int64{int64(21000000),\n\t\tint64(11000000),\n\t}\n\tpkhs := [][]byte{\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00},\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x04, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x03}}\n\tspendTypes := []bool{false, false}\n\tspendAmounts := []int64{int64(21000000),\n\t\tint64(11000000),\n\t}\n\tspendPkhs := [][]byte{\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00},\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x04, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x03}}\n\tspendRules := [][]bool{\n\t\t{false, false},\n\t\t{false, false}}\n\tspendLimits := [][]uint16{\n\t\t{16, 20},\n\t\t{16, 20}}\n\n\t\/\/ bad types len\n\tspendTypesBad := []bool{false}\n\terr := stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypesBad,\n\t\tspendPkhs,\n\t\tspendAmounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyInput {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ bad types\n\tspendTypesBad = 
[]bool{false, true}\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypesBad,\n\t\tspendPkhs,\n\t\tspendAmounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyOutType {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ len error for amt slices\n\tspendAmountsBad := []int64{int64(11000111)}\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tspendAmountsBad,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyInput {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ len error for pks slices\n\tspendPkhsBad := [][]byte{\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00},\n\t}\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypes,\n\t\tspendPkhsBad,\n\t\tspendAmounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyInput {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ amount non-equivalence in position 1\n\tspendAmountsNonequiv := []int64{int64(21000000),\n\t\tint64(11000000)}\n\tspendAmountsNonequiv[1]--\n\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tspendAmountsNonequiv,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyOutputAmt {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ pkh non-equivalence in position 1\n\tspendPkhsNonequiv := [][]byte{\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00},\n\t\t{0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x04, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x00,\n\t\t\t0x00, 0x01, 0x02, 0x04}}\n\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypes,\n\t\tspendPkhsNonequiv,\n\t\tspendAmounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyOutPkhs {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ rule non-equivalence in position 1\n\tspendRulesNonequivV := [][]bool{\n\t\t{false, false},\n\t\t{true, false}}\n\tspendAmountsNonequivV := []int64{int64(21000000),\n\t\tint64(10934463)}\n\tspendAmountsNonequivVTooBig := []int64{int64(21000000),\n\t\tint64(11000001)}\n\n\tspendRulesNonequivR := [][]bool{\n\t\t{false, false},\n\t\t{false, true}}\n\tspendAmountsNonequivR := []int64{int64(21000000),\n\t\tint64(9951423)}\n\n\t\/\/ vote\n\t\/\/ original amount: 11000000\n\t\/\/ with the flag enabled, the minimum allowed to be spent is\n\t\/\/ 11000000 - 1 << 16 = 10934464\n\t\/\/ So, 10934464 should pass while 10934463 should fail.\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tspendAmountsNonequivV,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tamounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRulesNonequivV,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() 
!=\n\t\tstake.ErrVerifyTooMuchFees {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ original amount: 11000000\n\t\/\/ the maximum allows to be spent is 11000000\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tspendAmountsNonequivVTooBig,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tamounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRulesNonequivV,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifySpendTooMuch {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ revocation\n\t\/\/ original amount: 11000000\n\t\/\/ with the flag enabled, the minimum allowed to be spent is\n\t\/\/ 11000000 - 1 << 20 = 9951424\n\t\/\/ So, 9951424 should pass while 9951423 should fail.\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tspendAmountsNonequivR,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tamounts,\n\t\tfalse, \/\/ Revocation\n\t\tspendRulesNonequivR,\n\t\tspendLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyTooMuchFees {\n\t\tt.Errorf(\"TestVerifyStakingPkhsAndAmounts unexpected error: %v\", err)\n\t}\n\n\t\/\/ correct verification\n\terr = stake.VerifyStakingPkhsAndAmounts(types,\n\t\tpkhs,\n\t\tamounts,\n\t\tspendTypes,\n\t\tspendPkhs,\n\t\tspendAmounts,\n\t\ttrue, \/\/ Vote\n\t\tspendRules,\n\t\tspendLimits)\n\tif err != nil {\n\t\tt.Errorf(\"TestVerifySStxAmounts unexpected error: %v\", err)\n\t}\n}\n\nfunc TestVerifyRealTxs(t *testing.T) {\n\t\/\/ Load an SStx and the SSRtx that spends to test some real fee situation\n\t\/\/ and confirm the functionality of the functions used.\n\thexSstx, _ := hex.DecodeString(\"010000000267cfaa9ce3a50977dcd1015f4f\" +\n\t\t\"ce330071a3a9b855210e6646f6434caebda5a60200000001fffffffff6e6004\" +\n\t\t\"fd4a0a8d5823c99be0a66a5f9a89c3dd4f7cbf76880098b8ca9d80b0e020000\" +\n\t\t\"0001ffffffff05a42df60c0000000000001aba76a914c96206f8a3976057b2e\" +\n\t\t\"b846d46d4a909972fc7c788ac00000000000000000000206a1ec96206f8a397\" +\n\t\t\"6057b2eb846d46d4a909972fc7c780fe210a000000000054000000000000000\" +\n\t\t\"000001abd76a914c96206f8a3976057b2eb846d46d4a909972fc7c788ac0000\" +\n\t\t\"0000000000000000206a1ec96206f8a3976057b2eb846d46d4a909972fc7c70\" +\n\t\t\"c33d40200000000005474cb4d070000000000001abd76a914c96206f8a39760\" +\n\t\t\"57b2eb846d46d4a909972fc7c788ac00000000000000000280fe210a0000000\" +\n\t\t\"013030000000000006a47304402200dbc873e69571a4516c4ef869d856386f9\" +\n\t\t\"86c8543c0bc9f372ecd22c8606ccb102200f87a8f1b316b7675dfd1706eb22f\" +\n\t\t\"331cea14d2e2d5f2c1d88173881a0cd4a04012102716f806d1156d20b9b2482\" +\n\t\t\"2bff88549b510f400473536d3ea8d188b9fbe3835680fe210a0000000002030\" +\n\t\t\"000040000006b483045022100bc7d0b7aa2c6610b7639f492fa556954ebc52a\" +\n\t\t\"9dca5a417be4705ab424255ccd02200a0ccba2e2b7391b93b927f35150c1253\" +\n\t\t\"2bdc6f27a8e9eb0bd0bfbc8b9ab13a5012102716f806d1156d20b9b24822bff\" +\n\t\t\"88549b510f400473536d3ea8d188b9fbe38356\")\n\tsstxMtx := new(wire.MsgTx)\n\tsstxMtx.FromBytes(hexSstx)\n\tsstxTx := hcutil.NewTx(sstxMtx)\n\tsstxTypes, sstxAddrs, sstxAmts, _, sstxRules, sstxLimits, _ :=\n\t\tstake.TxSStxStakeOutputInfo(sstxTx.MsgTx())\n\n\thexSsrtx, _ := hex.DecodeString(\"010000000147f4453f244f2589551aea7c714d\" +\n\t\t\"771053b667c6612616e9c8fc0e68960a9a100000000001ffffffff0270d7210a00\" +\n\t\t\"00000000001abc76a914c96206f8a3976057b2eb846d46d4a909972fc7c788ac0c\" +\n\t\t\"33d4020000000000001abc76a914c96206f8a3976057b2eb846d46d4a909972fc7\" 
+\n\t\t\"c788ac000000000000000001ffffffffffffffff00000000ffffffff6b48304502\" +\n\t\t\"2100d01c52c3f0c27166e3633d93b5ba821365a73f761e23bb04cc8061a28ab1bd\" +\n\t\t\"7d02202bd65a6d16aaefe8b7f56378d58da6650f2e4b20bd5cb659dc9e842ce2d9\" +\n\t\t\"15e6012102716f806d1156d20b9b24822bff88549b510f400473536d3ea8d188b9\" +\n\t\t\"fbe38356\")\n\tssrtxMtx := new(wire.MsgTx)\n\tssrtxMtx.FromBytes(hexSsrtx)\n\tssrtxTx := hcutil.NewTx(ssrtxMtx)\n\n\tssrtxTypes, ssrtxAddrs, ssrtxAmts, err :=\n\t\tstake.TxSSRtxStakeOutputInfo(ssrtxTx.MsgTx(), &chaincfg.TestNet2Params)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected GetSSRtxStakeOutputInfo error: %v\", err.Error())\n\t}\n\n\tssrtxCalcAmts := stake.CalculateRewards(sstxAmts, sstxMtx.TxOut[0].Value,\n\t\tint64(0))\n\n\t\/\/ Here an error is thrown because the second output spends too much.\n\t\/\/ Ticket price: 217460132\n\t\/\/ 1: 170000000 - 170000000. 169999218 allowed back (-782 atoms)\n\t\/\/ 2: 170000000 - 47461132. 122538868 Change. Paid 1000 fees total.\n\t\/\/ 47460913 allowed back (-219 atoms for fee).\n\t\/\/ In this test the second output spends 47461132, which is more than\n\t\/\/ allowed.\n\terr = stake.VerifyStakingPkhsAndAmounts(sstxTypes,\n\t\tsstxAddrs,\n\t\tssrtxAmts,\n\t\tssrtxTypes,\n\t\tssrtxAddrs,\n\t\tssrtxCalcAmts,\n\t\tfalse, \/\/ Revocation\n\t\tsstxRules,\n\t\tsstxLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifySpendTooMuch {\n\t\tt.Errorf(\"No or unexpected VerifyStakingPkhsAndAmounts error: %v\",\n\t\t\terr.Error())\n\t}\n\n\t\/\/ Correct this and make sure it passes.\n\tssrtxTx.MsgTx().TxOut[1].Value = 47460913\n\tsstxTypes, sstxAddrs, sstxAmts, _, sstxRules, sstxLimits, _ =\n\t\tstake.TxSStxStakeOutputInfo(sstxTx.MsgTx())\n\tssrtxTypes, ssrtxAddrs, ssrtxAmts, err =\n\t\tstake.TxSSRtxStakeOutputInfo(ssrtxTx.MsgTx(), &chaincfg.TestNet2Params)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected GetSSRtxStakeOutputInfo error: %v\", err.Error())\n\t}\n\tssrtxCalcAmts = stake.CalculateRewards(sstxAmts, sstxMtx.TxOut[0].Value,\n\t\tint64(0))\n\terr = stake.VerifyStakingPkhsAndAmounts(sstxTypes,\n\t\tsstxAddrs,\n\t\tssrtxAmts,\n\t\tssrtxTypes,\n\t\tssrtxAddrs,\n\t\tssrtxCalcAmts,\n\t\tfalse, \/\/ Revocation\n\t\tsstxRules,\n\t\tsstxLimits)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected VerifyStakingPkhsAndAmounts error: %v\",\n\t\t\terr)\n\t}\n\n\t\/\/ Spend too much fees for the limit in the first output and\n\t\/\/ make sure it fails.\n\tssrtxTx.MsgTx().TxOut[0].Value = 0\n\tsstxTypes, sstxAddrs, sstxAmts, _, sstxRules, sstxLimits, _ =\n\t\tstake.TxSStxStakeOutputInfo(sstxTx.MsgTx())\n\tssrtxTypes, ssrtxAddrs, ssrtxAmts, err =\n\t\tstake.TxSSRtxStakeOutputInfo(ssrtxTx.MsgTx(), &chaincfg.TestNet2Params)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected GetSSRtxStakeOutputInfo error: %v\", err.Error())\n\t}\n\tssrtxCalcAmts = stake.CalculateRewards(sstxAmts, sstxMtx.TxOut[0].Value,\n\t\tint64(0))\n\terr = stake.VerifyStakingPkhsAndAmounts(sstxTypes,\n\t\tsstxAddrs,\n\t\tssrtxAmts,\n\t\tssrtxTypes,\n\t\tssrtxAddrs,\n\t\tssrtxCalcAmts,\n\t\tfalse, \/\/ Revocation\n\t\tsstxRules,\n\t\tsstxLimits)\n\tif err == nil || err.(stake.RuleError).GetCode() !=\n\t\tstake.ErrVerifyTooMuchFees {\n\t\tt.Errorf(\"No or unexpected VerifyStakingPkhsAndAmounts error: %v\",\n\t\t\terr.Error())\n\t}\n\n\t\/\/ Use everything as fees and make sure both participants are paid\n\t\/\/ equally for their contibutions. 
Both inputs to the SStx are the\n\t\/\/ same size, so this is possible.\n\tcopy(sstxTx.MsgTx().TxOut[3].PkScript, sstxTx.MsgTx().TxOut[1].PkScript)\n\tsstxTx.MsgTx().TxOut[4].Value = 0\n\tssrtxTx.MsgTx().TxOut[0].Value = 108730066\n\tssrtxTx.MsgTx().TxOut[1].Value = 108730066\n\tsstxTypes, sstxAddrs, sstxAmts, _, sstxRules, sstxLimits, _ =\n\t\tstake.TxSStxStakeOutputInfo(sstxTx.MsgTx())\n\tssrtxTypes, ssrtxAddrs, ssrtxAmts, err =\n\t\tstake.TxSSRtxStakeOutputInfo(ssrtxTx.MsgTx(), &chaincfg.TestNet2Params)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected GetSSRtxStakeOutputInfo error: %v\", err.Error())\n\t}\n\tssrtxCalcAmts = stake.CalculateRewards(sstxAmts, sstxMtx.TxOut[0].Value,\n\t\tint64(0))\n\terr = stake.VerifyStakingPkhsAndAmounts(sstxTypes,\n\t\tsstxAddrs,\n\t\tssrtxAmts,\n\t\tssrtxTypes,\n\t\tssrtxAddrs,\n\t\tssrtxCalcAmts,\n\t\tfalse, \/\/ Revocation\n\t\tsstxRules,\n\t\tsstxLimits)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected VerifyStakingPkhsAndAmounts error: %v\",\n\t\t\terr.Error())\n\t}\n\tif ssrtxCalcAmts[0] != ssrtxCalcAmts[1] {\n\t\tt.Errorf(\"Unexpected ssrtxCalcAmts; both values should be same but \"+\n\t\t\t\"got %v and %v\", ssrtxCalcAmts[0], ssrtxCalcAmts[1])\n\t}\n}\n\n\/\/ --------------------------------------------------------------------------------\n\/\/ TESTING VARIABLES BEGIN HERE\n\n\/\/ sstxTxIn is the first input in the reference valid sstx\nvar sstxTxIn = wire.TxIn{\n\tPreviousOutPoint: wire.OutPoint{\n\t\tHash: chainhash.Hash([32]byte{ \/\/ Make go vet happy.\n\t\t\t0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60,\n\t\t\t0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac,\n\t\t\t0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07,\n\t\t\t0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87,\n\t\t}), \/\/ 87a157f3fd88ac7907c05fc55e271dc4acdc5605d187d646604ca8c0e9382e03\n\t\tIndex: 0,\n\t\tTree: wire.TxTreeRegular,\n\t},\n\tSignatureScript: []byte{\n\t\t0x49, \/\/ OP_DATA_73\n\t\t0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3,\n\t\t0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6,\n\t\t0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94,\n\t\t0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58,\n\t\t0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00,\n\t\t0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62,\n\t\t0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c,\n\t\t0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60,\n\t\t0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48,\n\t\t0x01, \/\/ 73-byte signature\n\t\t0x41, \/\/ OP_DATA_65\n\t\t0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d,\n\t\t0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38,\n\t\t0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25,\n\t\t0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e,\n\t\t0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8,\n\t\t0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd,\n\t\t0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b,\n\t\t0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3,\n\t\t0xd3, \/\/ 65-byte pubkey\n\t},\n\tSequence: 0xffffffff,\n}\n\n\/\/ sstxTxOut0 is the first output in the reference valid sstx\nvar sstxTxOut0 = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xba, \/\/ OP_SSTX\n\t\t0x76, \/\/ OP_DUP\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x88, \/\/ OP_EQUALVERIFY\n\t\t0xac, \/\/ OP_CHECKSIG\n\t},\n}\n\n\/\/ sstxTxOut1 is the second output in the reference valid sstx\nvar sstxTxOut1 = wire.TxOut{\n\tValue: 
0x00000000, \/\/ 0\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0x6a, \/\/ OP_RETURN\n\t\t0x1e, \/\/ 30 bytes to be pushed\n\t\t0x94, 0x8c, 0x76, 0x5a, \/\/ 20 byte address\n\t\t0x69, 0x14, 0xd4, 0x3f,\n\t\t0x2a, 0x7a, 0xc1, 0x77,\n\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t0x00, 0xe3, 0x23, 0x21, \/\/ Transaction amount\n\t\t0x00, 0x00, 0x00, 0x00,\n\t\t0x44, 0x3f, 0x3f, \/\/ Fee limits\n\t},\n}\n\n\/\/ sstxTxOut2 is the third output in the reference valid sstx\nvar sstxTxOut2 = wire.TxOut{\n\tValue: 0x2223e300,\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbd, \/\/ OP_SSTXCHANGE\n\t\t0x76, \/\/ OP_DUP\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x88, \/\/ OP_EQUALVERIFY\n\t\t0xac, \/\/ OP_CHECKSIG\n\t},\n}\n\n\/\/ sstxTxOut3 is another output in an SStx, this time instruction to pay to\n\/\/ a P2SH output\nvar sstxTxOut3 = wire.TxOut{\n\tValue: 0x00000000, \/\/ 0\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0x6a, \/\/ OP_RETURN\n\t\t0x1e, \/\/ 30 bytes to be pushed\n\t\t0x94, 0x8c, 0x76, 0x5a, \/\/ 20 byte address\n\t\t0x69, 0x14, 0xd4, 0x3f,\n\t\t0x2a, 0x7a, 0xc1, 0x77,\n\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t0x00, 0xe3, 0x23, 0x21, \/\/ Transaction amount\n\t\t0x00, 0x00, 0x00, 0x80, \/\/ Last byte flagged\n\t\t0x44, 0x3f, 0x3f, \/\/ Fee limits\n\t},\n}\n\n\/\/ sstxTxOut4 is the another output in the reference valid sstx, and pays change\n\/\/ to a P2SH address\nvar sstxTxOut4 = wire.TxOut{\n\tValue: 0x2223e300,\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbd, \/\/ OP_SSTXCHANGE\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ sstxTxOut4VerBad is the third output in the reference valid sstx, with a\n\/\/ bad version.\nvar sstxTxOut4VerBad = wire.TxOut{\n\tValue: 0x2223e300,\n\tVersion: 0x1234,\n\tPkScript: []byte{\n\t\t0xbd, \/\/ OP_SSTXCHANGE\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ sstxMsgTx is a valid SStx MsgTx with an input and outputs and is used in various\n\/\/ tests\nvar sstxMsgTx = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t\t&sstxTxIn,\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0,\n\t\t&sstxTxOut1,\n\t\t&sstxTxOut2, \/\/ emulate change address\n\t\t&sstxTxOut1,\n\t\t&sstxTxOut2, \/\/ emulate change address\n\t\t&sstxTxOut3, \/\/ P2SH\n\t\t&sstxTxOut4, \/\/ P2SH change\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxMsgTxExtraInputs is an invalid SStx MsgTx with too many inputs\nvar sstxMsgTxExtraInput = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, 
&sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t\t&sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn, &sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0,\n\t\t&sstxTxOut1,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxMsgTxExtraOutputs is an invalid SStx MsgTx with too many outputs\nvar sstxMsgTxExtraOutputs = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1, &sstxTxOut1,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxMismatchedInsOuts is an invalid SStx MsgTx with too many outputs for the\n\/\/ number of inputs it has\nvar sstxMismatchedInsOuts = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0, &sstxTxOut1, &sstxTxOut2, &sstxTxOut1, &sstxTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxBadVersionOut is an invalid SStx MsgTx with an output containing a bad\n\/\/ version.\nvar sstxBadVersionOut = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t\t&sstxTxIn,\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0,\n\t\t&sstxTxOut1,\n\t\t&sstxTxOut2, \/\/ emulate change address\n\t\t&sstxTxOut1, 
\/\/ 3\n\t\t&sstxTxOut2, \/\/ 4\n\t\t&sstxTxOut3, \/\/ 5 P2SH\n\t\t&sstxTxOut4VerBad, \/\/ 6 P2SH change\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxNullDataMissing is an invalid SStx MsgTx with no address push in the second\n\/\/ output\nvar sstxNullDataMissing = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0, &sstxTxOut0, &sstxTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ sstxNullDataMisplaced is an invalid SStx MsgTx that has the commitment and\n\/\/ change outputs swapped\nvar sstxNullDataMisplaced = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&sstxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&sstxTxOut0, &sstxTxOut2, &sstxTxOut1,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenTxIn0 is the 0th position input in a valid SSGen tx used to test out the\n\/\/ IsSSGen function\nvar ssgenTxIn0 = wire.TxIn{\n\tPreviousOutPoint: wire.OutPoint{\n\t\tHash: chainhash.Hash{},\n\t\tIndex: 0xffffffff,\n\t\tTree: wire.TxTreeRegular,\n\t},\n\tSignatureScript: []byte{\n\t\t0x04, 0xff, 0xff, 0x00, 0x1d, 0x01, 0x04,\n\t},\n\tBlockHeight: wire.NullBlockHeight,\n\tBlockIndex: wire.NullBlockIndex,\n\tSequence: 0xffffffff,\n}\n\n\/\/ ssgenTxIn1 is the 1st position input in a valid SSGen tx used to test out the\n\/\/ IsSSGen function\nvar ssgenTxIn1 = wire.TxIn{\n\tPreviousOutPoint: wire.OutPoint{\n\t\tHash: chainhash.Hash([32]byte{ \/\/ Make go vet happy.\n\t\t\t0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60,\n\t\t\t0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac,\n\t\t\t0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07,\n\t\t\t0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87,\n\t\t}), \/\/ 87a157f3fd88ac7907c05fc55e271dc4acdc5605d187d646604ca8c0e9382e03\n\t\tIndex: 0,\n\t\tTree: wire.TxTreeStake,\n\t},\n\tSignatureScript: []byte{\n\t\t0x49, \/\/ OP_DATA_73\n\t\t0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3,\n\t\t0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6,\n\t\t0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94,\n\t\t0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58,\n\t\t0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00,\n\t\t0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62,\n\t\t0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c,\n\t\t0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60,\n\t\t0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48,\n\t\t0x01, \/\/ 73-byte signature\n\t\t0x41, \/\/ OP_DATA_65\n\t\t0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d,\n\t\t0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38,\n\t\t0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25,\n\t\t0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e,\n\t\t0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8,\n\t\t0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd,\n\t\t0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b,\n\t\t0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3,\n\t\t0xd3, \/\/ 65-byte pubkey\n\t},\n\tSequence: 0xffffffff,\n}\n\n\/\/ ssgenTxOut0 is the 0th position output in a valid SSGen tx used to test out the\n\/\/ IsSSGen function\nvar ssgenTxOut0 = wire.TxOut{\n\tValue: 0x00000000, \/\/ 0\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0x6a, \/\/ OP_RETURN\n\t\t0x24, \/\/ 36 bytes to be pushed\n\t\t0x94, 0x8c, 0x76, 0x5a, \/\/ 32 byte hash\n\t\t0x69, 0x14, 0xd4, 0x3f,\n\t\t0x2a, 0x7a, 0xc1, 0x77,\n\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t0xda, 0x2c, 0x2f, 0x6b,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t0x52, 0xde, 0x3d, 0x7c,\n\t\t0x00, 0xe3, 0x23, 0x21, \/\/ 4 byte height\n\t},\n}\n\n\/\/ 
ssgenTxOut1 is the 1st position output in a valid SSGen tx used to test out the\n\/\/ IsSSGen function\nvar ssgenTxOut1 = wire.TxOut{\n\tValue: 0x00000000, \/\/ 0\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0x6a, \/\/ OP_RETURN\n\t\t0x02, \/\/ 2 bytes to be pushed\n\t\t0x94, 0x8c, \/\/ Vote bits\n\t},\n}\n\n\/\/ ssgenTxOut2 is the 2nd position output in a valid SSGen tx used to test out the\n\/\/ IsSSGen function\nvar ssgenTxOut2 = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbb, \/\/ OP_SSGEN\n\t\t0x76, \/\/ OP_DUP\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x88, \/\/ OP_EQUALVERIFY\n\t\t0xac, \/\/ OP_CHECKSIG\n\t},\n}\n\n\/\/ ssgenTxOut3 is a P2SH output\nvar ssgenTxOut3 = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbb, \/\/ OP_SSGEN\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ ssgenTxOut3BadVer is a P2SH output with a bad version.\nvar ssgenTxOut3BadVer = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0100,\n\tPkScript: []byte{\n\t\t0xbb, \/\/ OP_SSGEN\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ ssgenMsgTx is a valid SSGen MsgTx with an input and outputs and is used in\n\/\/ various testing scenarios\nvar ssgenMsgTx = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut2,\n\t\t&ssgenTxOut3,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxExtraInput is an invalid SSGen MsgTx with too many inputs\nvar ssgenMsgTxExtraInput = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxExtraOutputs is an invalid SSGen MsgTx with too many outputs\nvar ssgenMsgTxExtraOutputs = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, 
&ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t\t&ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2, &ssgenTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxStakeBaseWrong is an invalid SSGen tx with the stakebase in the wrong\n\/\/ position\nvar ssgenMsgTxStakeBaseWrong = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn1,\n\t\t&ssgenTxIn0,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxBadVerOut is an invalid SSGen tx that contains an output with a bad\n\/\/ version\nvar ssgenMsgTxBadVerOut = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut2,\n\t\t&ssgenTxOut3BadVer,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxWrongZeroethOut is an invalid SSGen tx with the first output being not\n\/\/ an OP_RETURN push\nvar ssgenMsgTxWrongZeroethOut = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut2,\n\t\t&ssgenTxOut1,\n\t\t&ssgenTxOut0,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssgenMsgTxWrongFirstOut is an invalid SSGen tx with the second output being not\n\/\/ an OP_RETURN push\nvar ssgenMsgTxWrongFirstOut = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssgenTxIn0,\n\t\t&ssgenTxIn1,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssgenTxOut0,\n\t\t&ssgenTxOut2,\n\t\t&ssgenTxOut1,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssrtxTxIn is the 0th position input in a valid SSRtx tx used to test out the\n\/\/ IsSSRtx function\nvar ssrtxTxIn = wire.TxIn{\n\tPreviousOutPoint: wire.OutPoint{\n\t\tHash: chainhash.Hash([32]byte{ \/\/ Make go vet happy.\n\t\t\t0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60,\n\t\t\t0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac,\n\t\t\t0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07,\n\t\t\t0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87,\n\t\t}), \/\/ 87a157f3fd88ac7907c05fc55e271dc4acdc5605d187d646604ca8c0e9382e03\n\t\tIndex: 0,\n\t\tTree: wire.TxTreeStake,\n\t},\n\tSignatureScript: []byte{\n\t\t0x49, \/\/ OP_DATA_73\n\t\t0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3,\n\t\t0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6,\n\t\t0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94,\n\t\t0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58,\n\t\t0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00,\n\t\t0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62,\n\t\t0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c,\n\t\t0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60,\n\t\t0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48,\n\t\t0x01, \/\/ 73-byte signature\n\t\t0x41, \/\/ OP_DATA_65\n\t\t0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d,\n\t\t0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38,\n\t\t0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25,\n\t\t0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e,\n\t\t0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8,\n\t\t0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd,\n\t\t0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 
0x1b,\n\t\t0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3,\n\t\t0xd3, \/\/ 65-byte pubkey\n\t},\n\tSequence: 0xffffffff,\n}\n\n\/\/ ssrtxTxOut is the 0th position output in a valid SSRtx tx used to test out the\n\/\/ IsSSRtx function\nvar ssrtxTxOut = wire.TxOut{\n\tValue: 0x2122e300,\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbc, \/\/ OP_SSGEN\n\t\t0x76, \/\/ OP_DUP\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x33,\n\t\t0x88, \/\/ OP_EQUALVERIFY\n\t\t0xac, \/\/ OP_CHECKSIG\n\t},\n}\n\n\/\/ ssrtxTxOut2 is a P2SH output\nvar ssrtxTxOut2 = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0000,\n\tPkScript: []byte{\n\t\t0xbc, \/\/ OP_SSRTX\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ ssrtxTxOut2BadVer is a P2SH output with a non-default script version\nvar ssrtxTxOut2BadVer = wire.TxOut{\n\tValue: 0x2123e300, \/\/ 556000000\n\tVersion: 0x0100,\n\tPkScript: []byte{\n\t\t0xbc, \/\/ OP_SSRTX\n\t\t0xa9, \/\/ OP_HASH160\n\t\t0x14, \/\/ OP_DATA_20\n\t\t0xc3, 0x98, 0xef, 0xa9,\n\t\t0xc3, 0x92, 0xba, 0x60,\n\t\t0x13, 0xc5, 0xe0, 0x4e,\n\t\t0xe7, 0x29, 0x75, 0x5e,\n\t\t0xf7, 0xf5, 0x8b, 0x32,\n\t\t0x87, \/\/ OP_EQUAL\n\t},\n}\n\n\/\/ ssrtxMsgTx is a valid SSRtx MsgTx with an input and outputs and is used in\n\/\/ various testing scenarios\nvar ssrtxMsgTx = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssrtxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssrtxTxOut,\n\t\t&ssrtxTxOut2,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssrtxMsgTx is a valid SSRtx MsgTx with an input and outputs and is used in\n\/\/ various testing scenarios\nvar ssrtxMsgTxTooManyInputs = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssrtxTxIn,\n\t\t&ssrtxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssrtxTxOut,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n\n\/\/ ssrtxMsgTx is a valid SSRtx MsgTx with an input and outputs and is used in\n\/\/ various testing scenarios\nvar ssrtxMsgTxTooManyOutputs = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssrtxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t\t&ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut, &ssrtxTxOut,\n\t},\n\tLockTime: 0,\n\tExpiry: 
0,\n}\n\nvar ssrtxMsgTxBadVerOut = &wire.MsgTx{\n\tSerType: wire.TxSerializeFull,\n\tVersion: 1,\n\tTxIn: []*wire.TxIn{\n\t\t&ssrtxTxIn,\n\t},\n\tTxOut: []*wire.TxOut{\n\t\t&ssrtxTxOut,\n\t\t&ssrtxTxOut2BadVer,\n\t},\n\tLockTime: 0,\n\tExpiry: 0,\n}\n","avg_line_length":30.2332738626,"max_line_length":83,"alphanum_fraction":0.6786509892} +{"size":977,"ext":"go","lang":"Go","max_stars_count":2100.0,"content":"\/\/ Copyright (C) 2017 Google Inc.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage git\n\nimport (\n\t\"context\"\n)\n\n\/\/ Checkout checks out the given CL by SHA.\nfunc (g Git) Checkout(ctx context.Context, sha SHA) error {\n\t_, _, err := g.run(ctx, \"checkout\", sha)\n\treturn err\n}\n\n\/\/ CheckoutBranch checks out the given branch by name.\nfunc (g Git) CheckoutBranch(ctx context.Context, branch string) error {\n\t_, _, err := g.run(ctx, \"checkout\", branch)\n\treturn err\n}\n","avg_line_length":30.53125,"max_line_length":75,"alphanum_fraction":0.7205731832} +{"size":4862,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/\/ Code generated by smithy-go-codegen DO NOT EDIT.\n\npackage organizations\n\nimport (\n\t\"context\"\n\tawsmiddleware \"github.com\/aws\/aws-sdk-go-v2\/aws\/middleware\"\n\t\"github.com\/aws\/aws-sdk-go-v2\/aws\/signer\/v4\"\n\t\"github.com\/aws\/aws-sdk-go-v2\/service\/organizations\/types\"\n\t\"github.com\/aws\/smithy-go\/middleware\"\n\tsmithyhttp \"github.com\/aws\/smithy-go\/transport\/http\"\n)\n\n\/\/ Returns the contents of the effective policy for specified policy type and\n\/\/ account. The effective policy is the aggregation of any policies of the\n\/\/ specified type that the account inherits, plus any policy of that type that is\n\/\/ directly attached to the account. This operation applies only to policy types\n\/\/ other than service control policies (SCPs). For more information about policy\n\/\/ inheritance, see How Policy Inheritance Works\n\/\/ (http:\/\/docs.aws.amazon.com\/organizations\/latest\/userguide\/orgs_manage_policies-inheritance.html)\n\/\/ in the AWS Organizations User Guide. This operation can be called only from the\n\/\/ organization's management account or by a member account that is a delegated\n\/\/ administrator for an AWS service.\nfunc (c *Client) DescribeEffectivePolicy(ctx context.Context, params *DescribeEffectivePolicyInput, optFns ...func(*Options)) (*DescribeEffectivePolicyOutput, error) {\n\tif params == nil {\n\t\tparams = &DescribeEffectivePolicyInput{}\n\t}\n\n\tresult, metadata, err := c.invokeOperation(ctx, \"DescribeEffectivePolicy\", params, optFns, addOperationDescribeEffectivePolicyMiddlewares)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tout := result.(*DescribeEffectivePolicyOutput)\n\tout.ResultMetadata = metadata\n\treturn out, nil\n}\n\ntype DescribeEffectivePolicyInput struct {\n\n\t\/\/ The type of policy that you want information about. 
You can specify one of the\n\t\/\/ following values:\n\t\/\/\n\t\/\/ * AISERVICES_OPT_OUT_POLICY\n\t\/\/ (https:\/\/docs.aws.amazon.com\/organizations\/latest\/userguide\/orgs_manage_policies_ai-opt-out.html)\n\t\/\/\n\t\/\/ *\n\t\/\/ BACKUP_POLICY\n\t\/\/ (https:\/\/docs.aws.amazon.com\/organizations\/latest\/userguide\/orgs_manage_policies_backup.html)\n\t\/\/\n\t\/\/ *\n\t\/\/ TAG_POLICY\n\t\/\/ (https:\/\/docs.aws.amazon.com\/organizations\/latest\/userguide\/orgs_manage_policies_tag-policies.html)\n\t\/\/\n\t\/\/ This member is required.\n\tPolicyType types.EffectivePolicyType\n\n\t\/\/ When you're signed in as the management account, specify the ID of the account\n\t\/\/ that you want details about. Specifying an organization root or organizational\n\t\/\/ unit (OU) as the target is not supported.\n\tTargetId *string\n}\n\ntype DescribeEffectivePolicyOutput struct {\n\n\t\/\/ The contents of the effective policy.\n\tEffectivePolicy *types.EffectivePolicy\n\n\t\/\/ Metadata pertaining to the operation's result.\n\tResultMetadata middleware.Metadata\n}\n\nfunc addOperationDescribeEffectivePolicyMiddlewares(stack *middleware.Stack, options Options) (err error) {\n\terr = stack.Serialize.Add(&awsAwsjson11_serializeOpDescribeEffectivePolicy{}, middleware.After)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = stack.Deserialize.Add(&awsAwsjson11_deserializeOpDescribeEffectivePolicy{}, middleware.After)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err = addSetLoggerMiddleware(stack, options); err != nil {\n\t\treturn err\n\t}\n\tif err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addResolveEndpointMiddleware(stack, options); err != nil {\n\t\treturn err\n\t}\n\tif err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addRetryMiddlewares(stack, options); err != nil {\n\t\treturn err\n\t}\n\tif err = addHTTPSignerV4Middleware(stack, options); err != nil {\n\t\treturn err\n\t}\n\tif err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addClientUserAgent(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addOpDescribeEffectivePolicyValidationMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = stack.Initialize.Add(newServiceMetadataMiddleware_opDescribeEffectivePolicy(options.Region), middleware.Before); err != nil {\n\t\treturn err\n\t}\n\tif err = addRequestIDRetrieverMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addResponseErrorMiddleware(stack); err != nil {\n\t\treturn err\n\t}\n\tif err = addRequestResponseLogging(stack, options); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc newServiceMetadataMiddleware_opDescribeEffectivePolicy(region string) *awsmiddleware.RegisterServiceMetadata {\n\treturn &awsmiddleware.RegisterServiceMetadata{\n\t\tRegion: region,\n\t\tServiceID: ServiceID,\n\t\tSigningName: \"organizations\",\n\t\tOperationName: \"DescribeEffectivePolicy\",\n\t}\n}\n","avg_line_length":33.7638888889,"max_line_length":167,"alphanum_fraction":0.7593582888} 
+{"size":475,"ext":"go","lang":"Go","max_stars_count":2.0,"content":"package main\n\nimport \"fmt\"\n\nfunc functionA(a int, b int) {\n\tfmt.Println(a + b)\n}\n\nfunc functionB(a int, b int) {\n\tfmt.Println(a * b)\n}\n\nfunc functionC(a bool) {\n\tfmt.Println(!a)\n}\n\nfunc functionD(a string, b int) {\n\tfor i := 0; i < b; i++ {\n\t\tfmt.Print(a)\n\t}\n\tfmt.Println()\n}\n\nfunc main() {\n\tfunctionA(2, 3)\n\tfunctionB(2, 3)\n\tfunctionC(true)\n\tfunctionD(\"$\", 4)\n\tfunctionA(5, 6)\n\tfunctionB(5, 6)\n\tfunctionC(false)\n\tfunctionD(\"ha\", 3)\n}\n\n\/*\n5\n6\nfalse\n$$$$\n11\n30\ntrue\nhahaha\n*\/\n","avg_line_length":10.5555555556,"max_line_length":33,"alphanum_fraction":0.6063157895} +{"size":18032,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package scheduler\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\tmemdb \"github.com\/hashicorp\/go-memdb\"\n\t\"github.com\/hashicorp\/go-multierror\"\n\t\"github.com\/hashicorp\/nomad\/helper\/uuid\"\n\t\"github.com\/hashicorp\/nomad\/nomad\/structs\"\n)\n\nconst (\n\t\/\/ maxServiceScheduleAttempts is used to limit the number of times\n\t\/\/ we will attempt to schedule if we continue to hit conflicts for services.\n\tmaxServiceScheduleAttempts = 5\n\n\t\/\/ maxBatchScheduleAttempts is used to limit the number of times\n\t\/\/ we will attempt to schedule if we continue to hit conflicts for batch.\n\tmaxBatchScheduleAttempts = 2\n\n\t\/\/ allocNotNeeded is the status used when a job no longer requires an allocation\n\tallocNotNeeded = \"alloc not needed due to job update\"\n\n\t\/\/ allocMigrating is the status used when we must migrate an allocation\n\tallocMigrating = \"alloc is being migrated\"\n\n\t\/\/ allocUpdating is the status used when a job requires an update\n\tallocUpdating = \"alloc is being updated due to job update\"\n\n\t\/\/ allocLost is the status used when an allocation is lost\n\tallocLost = \"alloc is lost since its node is down\"\n\n\t\/\/ allocInPlace is the status used when speculating on an in-place update\n\tallocInPlace = \"alloc updating in-place\"\n\n\t\/\/ blockedEvalMaxPlanDesc is the description used for blocked evals that are\n\t\/\/ a result of hitting the max number of plan attempts\n\tblockedEvalMaxPlanDesc = \"created due to placement conflicts\"\n\n\t\/\/ blockedEvalFailedPlacements is the description used for blocked evals\n\t\/\/ that are a result of failing to place all allocations.\n\tblockedEvalFailedPlacements = \"created to place remaining allocations\"\n)\n\n\/\/ SetStatusError is used to set the status of the evaluation to the given error\ntype SetStatusError struct {\n\tErr error\n\tEvalStatus string\n}\n\nfunc (s *SetStatusError) Error() string {\n\treturn s.Err.Error()\n}\n\n\/\/ GenericScheduler is used for 'service' and 'batch' type jobs. This scheduler is\n\/\/ designed for long-lived services, and as such spends more time attemping\n\/\/ to make a high quality placement. This is the primary scheduler for\n\/\/ most workloads. 
It also supports a 'batch' mode to optimize for fast decision\n\/\/ making at the cost of quality.\ntype GenericScheduler struct {\n\tlogger *log.Logger\n\tstate State\n\tplanner Planner\n\tbatch bool\n\n\teval *structs.Evaluation\n\tjob *structs.Job\n\tplan *structs.Plan\n\tplanResult *structs.PlanResult\n\tctx *EvalContext\n\tstack *GenericStack\n\n\tfollowupEvalWait time.Duration\n\tnextEval *structs.Evaluation\n\n\tdeployment *structs.Deployment\n\n\tblocked *structs.Evaluation\n\tfailedTGAllocs map[string]*structs.AllocMetric\n\tqueuedAllocs map[string]int\n}\n\n\/\/ NewServiceScheduler is a factory function to instantiate a new service scheduler\nfunc NewServiceScheduler(logger *log.Logger, state State, planner Planner) Scheduler {\n\ts := &GenericScheduler{\n\t\tlogger: logger,\n\t\tstate: state,\n\t\tplanner: planner,\n\t\tbatch: false,\n\t}\n\treturn s\n}\n\n\/\/ NewBatchScheduler is a factory function to instantiate a new batch scheduler\nfunc NewBatchScheduler(logger *log.Logger, state State, planner Planner) Scheduler {\n\ts := &GenericScheduler{\n\t\tlogger: logger,\n\t\tstate: state,\n\t\tplanner: planner,\n\t\tbatch: true,\n\t}\n\treturn s\n}\n\n\/\/ Process is used to handle a single evaluation\nfunc (s *GenericScheduler) Process(eval *structs.Evaluation) error {\n\t\/\/ Store the evaluation\n\ts.eval = eval\n\n\t\/\/ Verify the evaluation trigger reason is understood\n\tswitch eval.TriggeredBy {\n\tcase structs.EvalTriggerJobRegister, structs.EvalTriggerNodeUpdate,\n\t\tstructs.EvalTriggerJobDeregister, structs.EvalTriggerRollingUpdate,\n\t\tstructs.EvalTriggerPeriodicJob, structs.EvalTriggerMaxPlans,\n\t\tstructs.EvalTriggerDeploymentWatcher:\n\tdefault:\n\t\tdesc := fmt.Sprintf(\"scheduler cannot handle '%s' evaluation reason\",\n\t\t\teval.TriggeredBy)\n\t\treturn setStatus(s.logger, s.planner, s.eval, s.nextEval, s.blocked,\n\t\t\ts.failedTGAllocs, structs.EvalStatusFailed, desc, s.queuedAllocs,\n\t\t\ts.deployment.GetID())\n\t}\n\n\t\/\/ Retry up to the maxScheduleAttempts and reset if progress is made.\n\tprogress := func() bool { return progressMade(s.planResult) }\n\tlimit := maxServiceScheduleAttempts\n\tif s.batch {\n\t\tlimit = maxBatchScheduleAttempts\n\t}\n\tif err := retryMax(limit, s.process, progress); err != nil {\n\t\tif statusErr, ok := err.(*SetStatusError); ok {\n\t\t\t\/\/ Scheduling was tried but made no forward progress so create a\n\t\t\t\/\/ blocked eval to retry once resources become available.\n\t\t\tvar mErr multierror.Error\n\t\t\tif err := s.createBlockedEval(true); err != nil {\n\t\t\t\tmErr.Errors = append(mErr.Errors, err)\n\t\t\t}\n\t\t\tif err := setStatus(s.logger, s.planner, s.eval, s.nextEval, s.blocked,\n\t\t\t\ts.failedTGAllocs, statusErr.EvalStatus, err.Error(),\n\t\t\t\ts.queuedAllocs, s.deployment.GetID()); err != nil {\n\t\t\t\tmErr.Errors = append(mErr.Errors, err)\n\t\t\t}\n\t\t\treturn mErr.ErrorOrNil()\n\t\t}\n\t\treturn err\n\t}\n\n\t\/\/ If the current evaluation is a blocked evaluation and we didn't place\n\t\/\/ everything, do not update the status to complete.\n\tif s.eval.Status == structs.EvalStatusBlocked && len(s.failedTGAllocs) != 0 {\n\t\te := s.ctx.Eligibility()\n\t\tnewEval := s.eval.Copy()\n\t\tnewEval.EscapedComputedClass = e.HasEscaped()\n\t\tnewEval.ClassEligibility = e.GetClasses()\n\t\tnewEval.QuotaLimitReached = e.QuotaLimitReached()\n\t\treturn s.planner.ReblockEval(newEval)\n\t}\n\n\t\/\/ Update the status to complete\n\treturn setStatus(s.logger, s.planner, s.eval, s.nextEval, 
s.blocked,\n\t\ts.failedTGAllocs, structs.EvalStatusComplete, \"\", s.queuedAllocs,\n\t\ts.deployment.GetID())\n}\n\n\/\/ createBlockedEval creates a blocked eval and submits it to the planner. If\n\/\/ failure is set to true, the eval's trigger reason reflects that.\nfunc (s *GenericScheduler) createBlockedEval(planFailure bool) error {\n\te := s.ctx.Eligibility()\n\tescaped := e.HasEscaped()\n\n\t\/\/ Only store the eligible classes if the eval hasn't escaped.\n\tvar classEligibility map[string]bool\n\tif !escaped {\n\t\tclassEligibility = e.GetClasses()\n\t}\n\n\ts.blocked = s.eval.CreateBlockedEval(classEligibility, escaped, e.QuotaLimitReached())\n\tif planFailure {\n\t\ts.blocked.TriggeredBy = structs.EvalTriggerMaxPlans\n\t\ts.blocked.StatusDescription = blockedEvalMaxPlanDesc\n\t} else {\n\t\ts.blocked.StatusDescription = blockedEvalFailedPlacements\n\t}\n\n\treturn s.planner.CreateEval(s.blocked)\n}\n\n\/\/ process is wrapped in retryMax to iteratively run the handler until we have no\n\/\/ further work or we've made the maximum number of attempts.\nfunc (s *GenericScheduler) process() (bool, error) {\n\t\/\/ Lookup the Job by ID\n\tvar err error\n\tws := memdb.NewWatchSet()\n\ts.job, err = s.state.JobByID(ws, s.eval.Namespace, s.eval.JobID)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"failed to get job %q: %v\", s.eval.JobID, err)\n\t}\n\n\tnumTaskGroups := 0\n\tstopped := s.job.Stopped()\n\tif !stopped {\n\t\tnumTaskGroups = len(s.job.TaskGroups)\n\t}\n\ts.queuedAllocs = make(map[string]int, numTaskGroups)\n\n\t\/\/ Create a plan\n\ts.plan = s.eval.MakePlan(s.job)\n\n\tif !s.batch {\n\t\t\/\/ Get any existing deployment\n\t\ts.deployment, err = s.state.LatestDeploymentByJobID(ws, s.eval.Namespace, s.eval.JobID)\n\t\tif err != nil {\n\t\t\treturn false, fmt.Errorf(\"failed to get job deployment %q: %v\", s.eval.JobID, err)\n\t\t}\n\t}\n\n\t\/\/ Reset the failed allocations\n\ts.failedTGAllocs = nil\n\n\t\/\/ Create an evaluation context\n\ts.ctx = NewEvalContext(s.state, s.plan, s.logger)\n\n\t\/\/ Construct the placement stack\n\ts.stack = NewGenericStack(s.batch, s.ctx)\n\tif !s.job.Stopped() {\n\t\ts.stack.SetJob(s.job)\n\t}\n\n\t\/\/ Compute the target job allocations\n\tif err := s.computeJobAllocs(); err != nil {\n\t\ts.logger.Printf(\"[ERR] sched: %#v: %v\", s.eval, err)\n\t\treturn false, err\n\t}\n\n\t\/\/ If there are failed allocations, we need to create a blocked evaluation\n\t\/\/ to place the failed allocations when resources become available. If the\n\t\/\/ current evaluation is already a blocked eval, we reuse it.\n\tif s.eval.Status != structs.EvalStatusBlocked && len(s.failedTGAllocs) != 0 && s.blocked == nil {\n\t\tif err := s.createBlockedEval(false); err != nil {\n\t\t\ts.logger.Printf(\"[ERR] sched: %#v failed to make blocked eval: %v\", s.eval, err)\n\t\t\treturn false, err\n\t\t}\n\t\ts.logger.Printf(\"[DEBUG] sched: %#v: failed to place all allocations, blocked eval '%s' created\", s.eval, s.blocked.ID)\n\t}\n\n\t\/\/ If the plan is a no-op, we can bail. 
If AnnotatePlan is set submit the plan\n\t\/\/ anyways to get the annotations.\n\tif s.plan.IsNoOp() && !s.eval.AnnotatePlan {\n\t\treturn true, nil\n\t}\n\n\t\/\/ If we need a followup eval and we haven't created one, do so.\n\tif s.followupEvalWait != 0 && s.nextEval == nil {\n\t\ts.nextEval = s.eval.NextRollingEval(s.followupEvalWait)\n\t\tif err := s.planner.CreateEval(s.nextEval); err != nil {\n\t\t\ts.logger.Printf(\"[ERR] sched: %#v failed to make next eval for rolling migration: %v\", s.eval, err)\n\t\t\treturn false, err\n\t\t}\n\t\ts.logger.Printf(\"[DEBUG] sched: %#v: rolling migration limit reached, next eval '%s' created\", s.eval, s.nextEval.ID)\n\t}\n\n\t\/\/ Submit the plan and store the results.\n\tresult, newState, err := s.planner.SubmitPlan(s.plan)\n\ts.planResult = result\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\t\/\/ Decrement the number of allocations pending per task group based on the\n\t\/\/ number of allocations successfully placed\n\tadjustQueuedAllocations(s.logger, result, s.queuedAllocs)\n\n\t\/\/ If we got a state refresh, try again since we have stale data\n\tif newState != nil {\n\t\ts.logger.Printf(\"[DEBUG] sched: %#v: refresh forced\", s.eval)\n\t\ts.state = newState\n\t\treturn false, nil\n\t}\n\n\t\/\/ Try again if the plan was not fully committed, potential conflict\n\tfullCommit, expected, actual := result.FullCommit(s.plan)\n\tif !fullCommit {\n\t\ts.logger.Printf(\"[DEBUG] sched: %#v: attempted %d placements, %d placed\",\n\t\t\ts.eval, expected, actual)\n\t\tif newState == nil {\n\t\t\treturn false, fmt.Errorf(\"missing state refresh after partial commit\")\n\t\t}\n\t\treturn false, nil\n\t}\n\n\t\/\/ Success!\n\treturn true, nil\n}\n\n\/\/ filterCompleteAllocs filters allocations that are terminal and should be\n\/\/ re-placed.\nfunc (s *GenericScheduler) filterCompleteAllocs(allocs []*structs.Allocation) []*structs.Allocation {\n\tfilter := func(a *structs.Allocation) bool {\n\t\tif s.batch {\n\t\t\t\/\/ Allocs from batch jobs should be filtered when the desired status\n\t\t\t\/\/ is terminal and the client did not finish or when the client\n\t\t\t\/\/ status is failed so that they will be replaced. 
If they are\n\t\t\t\/\/ complete but not failed, they shouldn't be replaced.\n\t\t\tswitch a.DesiredStatus {\n\t\t\tcase structs.AllocDesiredStatusStop, structs.AllocDesiredStatusEvict:\n\t\t\t\treturn !a.RanSuccessfully()\n\t\t\tdefault:\n\t\t\t}\n\n\t\t\tswitch a.ClientStatus {\n\t\t\tcase structs.AllocClientStatusFailed:\n\t\t\t\treturn true\n\t\t\tdefault:\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\n\t\t\/\/ Filter terminal, non batch allocations\n\t\treturn a.TerminalStatus()\n\t}\n\n\tn := len(allocs)\n\tfor i := 0; i < n; i++ {\n\t\tif filter(allocs[i]) {\n\t\t\t\/\/ Remove the allocation\n\t\t\tallocs[i], allocs[n-1] = allocs[n-1], nil\n\t\t\ti--\n\t\t\tn--\n\t\t}\n\t}\n\n\treturn allocs[:n]\n}\n\n\/\/ computeJobAllocs is used to reconcile differences between the job,\n\/\/ existing allocations and node status to update the allocations.\nfunc (s *GenericScheduler) computeJobAllocs() error {\n\t\/\/ Lookup the allocations by JobID\n\tws := memdb.NewWatchSet()\n\tallocs, err := s.state.AllocsByJob(ws, s.eval.Namespace, s.eval.JobID, true)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get allocs for job '%s': %v\",\n\t\t\ts.eval.JobID, err)\n\t}\n\n\t\/\/ Determine the tainted nodes containing job allocs\n\ttainted, err := taintedNodes(s.state, allocs)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get tainted nodes for job '%s': %v\",\n\t\t\ts.eval.JobID, err)\n\t}\n\n\t\/\/ Update the allocations which are in pending\/running state on tainted\n\t\/\/ nodes to lost\n\tupdateNonTerminalAllocsToLost(s.plan, tainted, allocs)\n\n\t\/\/ Filter out the allocations in a terminal state\n\tallocs = s.filterCompleteAllocs(allocs)\n\n\treconciler := NewAllocReconciler(s.ctx.Logger(),\n\t\tgenericAllocUpdateFn(s.ctx, s.stack, s.eval.ID),\n\t\ts.batch, s.eval.JobID, s.job, s.deployment, allocs, tainted)\n\tresults := reconciler.Compute()\n\ts.logger.Printf(\"[DEBUG] sched: %#v: %#v\", s.eval, results)\n\n\tif s.eval.AnnotatePlan {\n\t\ts.plan.Annotations = &structs.PlanAnnotations{\n\t\t\tDesiredTGUpdates: results.desiredTGUpdates,\n\t\t}\n\t}\n\n\t\/\/ Add the deployment changes to the plan\n\ts.plan.Deployment = results.deployment\n\ts.plan.DeploymentUpdates = results.deploymentUpdates\n\n\t\/\/ Store the the follow up eval wait duration. 
If set this will trigger a\n\t\/\/ follow up eval to handle node draining.\n\ts.followupEvalWait = results.followupEvalWait\n\n\t\/\/ Update the stored deployment\n\tif results.deployment != nil {\n\t\ts.deployment = results.deployment\n\t}\n\n\t\/\/ Handle the stop\n\tfor _, stop := range results.stop {\n\t\ts.plan.AppendUpdate(stop.alloc, structs.AllocDesiredStatusStop, stop.statusDescription, stop.clientStatus)\n\t}\n\n\t\/\/ Handle the in-place updates\n\tfor _, update := range results.inplaceUpdate {\n\t\tif update.DeploymentID != s.deployment.GetID() {\n\t\t\tupdate.DeploymentID = s.deployment.GetID()\n\t\t\tupdate.DeploymentStatus = nil\n\t\t}\n\t\ts.ctx.Plan().AppendAlloc(update)\n\t}\n\n\t\/\/ Nothing remaining to do if placement is not required\n\tif len(results.place)+len(results.destructiveUpdate) == 0 {\n\t\tif !s.job.Stopped() {\n\t\t\tfor _, tg := range s.job.TaskGroups {\n\t\t\t\ts.queuedAllocs[tg.Name] = 0\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\t\/\/ Record the number of allocations that needs to be placed per Task Group\n\tfor _, place := range results.place {\n\t\ts.queuedAllocs[place.taskGroup.Name] += 1\n\t}\n\tfor _, destructive := range results.destructiveUpdate {\n\t\ts.queuedAllocs[destructive.placeTaskGroup.Name] += 1\n\t}\n\n\t\/\/ Compute the placements\n\tplace := make([]placementResult, 0, len(results.place))\n\tfor _, p := range results.place {\n\t\tplace = append(place, p)\n\t}\n\n\tdestructive := make([]placementResult, 0, len(results.destructiveUpdate))\n\tfor _, p := range results.destructiveUpdate {\n\t\tdestructive = append(destructive, p)\n\t}\n\treturn s.computePlacements(destructive, place)\n}\n\n\/\/ computePlacements computes placements for allocations. It is given the set of\n\/\/ destructive updates to place and the set of new placements to place.\nfunc (s *GenericScheduler) computePlacements(destructive, place []placementResult) error {\n\t\/\/ Get the base nodes\n\tnodes, byDC, err := readyNodesInDCs(s.state, s.job.Datacenters)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar deploymentID string\n\tif s.deployment != nil {\n\t\tdeploymentID = s.deployment.ID\n\t}\n\n\t\/\/ Update the set of placement ndoes\n\ts.stack.SetNodes(nodes)\n\n\t\/\/ Have to handle destructive changes first as we need to discount their\n\t\/\/ resources. To understand this imagine the resources were reduced and the\n\t\/\/ count was scaled up.\n\tfor _, results := range [][]placementResult{destructive, place} {\n\t\tfor _, missing := range results {\n\t\t\t\/\/ Get the task group\n\t\t\ttg := missing.TaskGroup()\n\n\t\t\t\/\/ Check if this task group has already failed\n\t\t\tif metric, ok := s.failedTGAllocs[tg.Name]; ok {\n\t\t\t\tmetric.CoalescedFailures += 1\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t\/\/ Find the preferred node\n\t\t\tpreferredNode, err := s.findPreferredNode(missing)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t\/\/ Check if we should stop the previous allocation upon successful\n\t\t\t\/\/ placement of its replacement. This allow atomic placements\/stops. 
We\n\t\t\t\/\/ stop the allocation before trying to find a replacement because this\n\t\t\t\/\/ frees the resources currently used by the previous allocation.\n\t\t\tstopPrevAlloc, stopPrevAllocDesc := missing.StopPreviousAlloc()\n\t\t\tif stopPrevAlloc {\n\t\t\t\ts.plan.AppendUpdate(missing.PreviousAllocation(), structs.AllocDesiredStatusStop, stopPrevAllocDesc, \"\")\n\t\t\t}\n\n\t\t\t\/\/ Attempt to match the task group\n\t\t\tvar option *RankedNode\n\t\t\tif preferredNode != nil {\n\t\t\t\toption, _ = s.stack.SelectPreferringNodes(tg, []*structs.Node{preferredNode})\n\t\t\t} else {\n\t\t\t\toption, _ = s.stack.Select(tg)\n\t\t\t}\n\n\t\t\t\/\/ Store the available nodes by datacenter\n\t\t\ts.ctx.Metrics().NodesAvailable = byDC\n\n\t\t\t\/\/ Set fields based on if we found an allocation option\n\t\t\tif option != nil {\n\t\t\t\t\/\/ Create an allocation for this\n\t\t\t\talloc := &structs.Allocation{\n\t\t\t\t\tID: uuid.Generate(),\n\t\t\t\t\tNamespace: s.job.Namespace,\n\t\t\t\t\tEvalID: s.eval.ID,\n\t\t\t\t\tName: missing.Name(),\n\t\t\t\t\tJobID: s.job.ID,\n\t\t\t\t\tTaskGroup: tg.Name,\n\t\t\t\t\tMetrics: s.ctx.Metrics(),\n\t\t\t\t\tNodeID: option.Node.ID,\n\t\t\t\t\tDeploymentID: deploymentID,\n\t\t\t\t\tTaskResources: option.TaskResources,\n\t\t\t\t\tDesiredStatus: structs.AllocDesiredStatusRun,\n\t\t\t\t\tClientStatus: structs.AllocClientStatusPending,\n\n\t\t\t\t\tSharedResources: &structs.Resources{\n\t\t\t\t\t\tDiskMB: tg.EphemeralDisk.SizeMB,\n\t\t\t\t\t},\n\t\t\t\t}\n\n\t\t\t\t\/\/ If the new allocation is replacing an older allocation then we\n\t\t\t\t\/\/ set the record the older allocation id so that they are chained\n\t\t\t\tif prev := missing.PreviousAllocation(); prev != nil {\n\t\t\t\t\talloc.PreviousAllocation = prev.ID\n\t\t\t\t}\n\n\t\t\t\t\/\/ If we are placing a canary and we found a match, add the canary\n\t\t\t\t\/\/ to the deployment state object.\n\t\t\t\tif missing.Canary() {\n\t\t\t\t\tif state, ok := s.deployment.TaskGroups[tg.Name]; ok {\n\t\t\t\t\t\tstate.PlacedCanaries = append(state.PlacedCanaries, alloc.ID)\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t\/\/ Track the placement\n\t\t\t\ts.plan.AppendAlloc(alloc)\n\n\t\t\t} else {\n\t\t\t\t\/\/ Lazy initialize the failed map\n\t\t\t\tif s.failedTGAllocs == nil {\n\t\t\t\t\ts.failedTGAllocs = make(map[string]*structs.AllocMetric)\n\t\t\t\t}\n\n\t\t\t\t\/\/ Track the fact that we didn't find a placement\n\t\t\t\ts.failedTGAllocs[tg.Name] = s.ctx.Metrics()\n\n\t\t\t\t\/\/ If we weren't able to find a replacement for the allocation, back\n\t\t\t\t\/\/ out the fact that we asked to stop the allocation.\n\t\t\t\tif stopPrevAlloc {\n\t\t\t\t\ts.plan.PopUpdate(missing.PreviousAllocation())\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ findPreferredNode finds the preferred node for an allocation\nfunc (s *GenericScheduler) findPreferredNode(place placementResult) (node *structs.Node, err error) {\n\tif prev := place.PreviousAllocation(); prev != nil && place.TaskGroup().EphemeralDisk.Sticky == true {\n\t\tvar preferredNode *structs.Node\n\t\tws := memdb.NewWatchSet()\n\t\tpreferredNode, err = s.state.NodeByID(ws, prev.NodeID)\n\t\tif preferredNode.Ready() {\n\t\t\tnode = preferredNode\n\t\t}\n\t}\n\treturn\n}\n","avg_line_length":32.1426024955,"max_line_length":121,"alphanum_fraction":0.7148957409} +{"size":23191,"ext":"go","lang":"Go","max_stars_count":1127.0,"content":"package network\n\n\/\/ Copyright (c) Microsoft Corporation. All rights reserved.\n\/\/ Licensed under the MIT License. 
See License.txt in the project root for license information.\n\/\/\n\/\/ Code generated by Microsoft (R) AutoRest Code Generator.\n\/\/ Changes may cause incorrect behavior and will be lost if the code is regenerated.\n\nimport (\n\t\"context\"\n\t\"github.com\/Azure\/go-autorest\/autorest\"\n\t\"github.com\/Azure\/go-autorest\/autorest\/azure\"\n\t\"github.com\/Azure\/go-autorest\/tracing\"\n\t\"net\/http\"\n)\n\n\/\/ ExpressRoutePortsClient is the network Client\ntype ExpressRoutePortsClient struct {\n\tBaseClient\n}\n\n\/\/ NewExpressRoutePortsClient creates an instance of the ExpressRoutePortsClient client.\nfunc NewExpressRoutePortsClient(subscriptionID string) ExpressRoutePortsClient {\n\treturn NewExpressRoutePortsClientWithBaseURI(DefaultBaseURI, subscriptionID)\n}\n\n\/\/ NewExpressRoutePortsClientWithBaseURI creates an instance of the ExpressRoutePortsClient client using a custom\n\/\/ endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure\n\/\/ stack).\nfunc NewExpressRoutePortsClientWithBaseURI(baseURI string, subscriptionID string) ExpressRoutePortsClient {\n\treturn ExpressRoutePortsClient{NewWithBaseURI(baseURI, subscriptionID)}\n}\n\n\/\/ CreateOrUpdate creates or updates the specified ExpressRoutePort resource.\n\/\/ Parameters:\n\/\/ resourceGroupName - the name of the resource group.\n\/\/ expressRoutePortName - the name of the ExpressRoutePort resource.\n\/\/ parameters - parameters supplied to the create ExpressRoutePort operation.\nfunc (client ExpressRoutePortsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, expressRoutePortName string, parameters ExpressRoutePort) (result ExpressRoutePortsCreateOrUpdateFuture, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.CreateOrUpdate\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.FutureAPI != nil && result.FutureAPI.Response() != nil {\n\t\t\t\tsc = result.FutureAPI.Response().StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, expressRoutePortName, parameters)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"CreateOrUpdate\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresult, err = client.CreateOrUpdateSender(req)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"CreateOrUpdate\", result.Response(), \"Failure sending request\")\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ CreateOrUpdatePreparer prepares the CreateOrUpdate request.\nfunc (client ExpressRoutePortsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, expressRoutePortName string, parameters ExpressRoutePort) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"expressRoutePortName\": autorest.Encode(\"path\", expressRoutePortName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tparameters.Etag = nil\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application\/json; 
charset=utf-8\"),\n\t\tautorest.AsPut(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Network\/ExpressRoutePorts\/{expressRoutePortName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) CreateOrUpdateSender(req *http.Request) (future ExpressRoutePortsCreateOrUpdateFuture, err error) {\n\tvar resp *http.Response\n\tfuture.FutureAPI = &azure.Future{}\n\tresp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))\n\tif err != nil {\n\t\treturn\n\t}\n\tvar azf azure.Future\n\tazf, err = azure.NewFutureFromResponse(resp)\n\tfuture.FutureAPI = &azf\n\tfuture.Result = future.result\n\treturn\n}\n\n\/\/ CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) CreateOrUpdateResponder(resp *http.Response) (result ExpressRoutePort, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ Delete deletes the specified ExpressRoutePort resource.\n\/\/ Parameters:\n\/\/ resourceGroupName - the name of the resource group.\n\/\/ expressRoutePortName - the name of the ExpressRoutePort resource.\nfunc (client ExpressRoutePortsClient) Delete(ctx context.Context, resourceGroupName string, expressRoutePortName string) (result ExpressRoutePortsDeleteFuture, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.Delete\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.FutureAPI != nil && result.FutureAPI.Response() != nil {\n\t\t\t\tsc = result.FutureAPI.Response().StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.DeletePreparer(ctx, resourceGroupName, expressRoutePortName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"Delete\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DeleteSender(req)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"Delete\", result.Response(), \"Failure sending request\")\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ DeletePreparer prepares the Delete request.\nfunc (client ExpressRoutePortsClient) DeletePreparer(ctx context.Context, resourceGroupName string, expressRoutePortName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"expressRoutePortName\": autorest.Encode(\"path\", expressRoutePortName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := 
autorest.CreatePreparer(\n\t\tautorest.AsDelete(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Network\/ExpressRoutePorts\/{expressRoutePortName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DeleteSender sends the Delete request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) DeleteSender(req *http.Request) (future ExpressRoutePortsDeleteFuture, err error) {\n\tvar resp *http.Response\n\tfuture.FutureAPI = &azure.Future{}\n\tresp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))\n\tif err != nil {\n\t\treturn\n\t}\n\tvar azf azure.Future\n\tazf, err = azure.NewFutureFromResponse(resp)\n\tfuture.FutureAPI = &azf\n\tfuture.Result = future.result\n\treturn\n}\n\n\/\/ DeleteResponder handles the response to the Delete request. The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ Get retrieves the requested ExpressRoutePort resource.\n\/\/ Parameters:\n\/\/ resourceGroupName - the name of the resource group.\n\/\/ expressRoutePortName - the name of ExpressRoutePort.\nfunc (client ExpressRoutePortsClient) Get(ctx context.Context, resourceGroupName string, expressRoutePortName string) (result ExpressRoutePort, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.Get\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response.Response != nil {\n\t\t\t\tsc = result.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.GetPreparer(ctx, resourceGroupName, expressRoutePortName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"Get\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.GetSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"Get\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.GetResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"Get\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ GetPreparer prepares the Get request.\nfunc (client ExpressRoutePortsClient) GetPreparer(ctx context.Context, resourceGroupName string, expressRoutePortName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"expressRoutePortName\": autorest.Encode(\"path\", expressRoutePortName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := 
autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Network\/ExpressRoutePorts\/{expressRoutePortName}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ GetSender sends the Get request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) GetSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ GetResponder handles the response to the Get request. The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) GetResponder(resp *http.Response) (result ExpressRoutePort, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ List list all the ExpressRoutePort resources in the specified subscription\nfunc (client ExpressRoutePortsClient) List(ctx context.Context) (result ExpressRoutePortListResultPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.List\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.erplr.Response.Response != nil {\n\t\t\t\tsc = result.erplr.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listNextResults\n\treq, err := client.ListPreparer(ctx)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"List\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListSender(req)\n\tif err != nil {\n\t\tresult.erplr.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"List\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.erplr, err = client.ListResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"List\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\tif result.erplr.hasNextLink() && result.erplr.IsEmpty() {\n\t\terr = result.NextWithContext(ctx)\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ ListPreparer prepares the List request.\nfunc (client ExpressRoutePortsClient) ListPreparer(ctx context.Context) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/providers\/Microsoft.Network\/ExpressRoutePorts\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListSender sends the List request. 
The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) ListSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListResponder handles the response to the List request. The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) ListResponder(resp *http.Response) (result ExpressRoutePortListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listNextResults retrieves the next set of results, if any.\nfunc (client ExpressRoutePortsClient) listNextResults(ctx context.Context, lastResults ExpressRoutePortListResult) (result ExpressRoutePortListResult, err error) {\n\treq, err := lastResults.expressRoutePortListResultPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client ExpressRoutePortsClient) ListComplete(ctx context.Context) (result ExpressRoutePortListResultIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.List\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.List(ctx)\n\treturn\n}\n\n\/\/ ListByResourceGroup list all the ExpressRoutePort resources in the specified resource group.\n\/\/ Parameters:\n\/\/ resourceGroupName - the name of the resource group.\nfunc (client ExpressRoutePortsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ExpressRoutePortListResultPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.ListByResourceGroup\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.erplr.Response.Response != nil {\n\t\t\t\tsc = result.erplr.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listByResourceGroupNextResults\n\treq, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"ListByResourceGroup\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListByResourceGroupSender(req)\n\tif err != nil {\n\t\tresult.erplr.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"ListByResourceGroup\", 
resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.erplr, err = client.ListByResourceGroupResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"ListByResourceGroup\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\tif result.erplr.hasNextLink() && result.erplr.IsEmpty() {\n\t\terr = result.NextWithContext(ctx)\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ ListByResourceGroupPreparer prepares the ListByResourceGroup request.\nfunc (client ExpressRoutePortsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Network\/ExpressRoutePorts\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListByResourceGroupResponder handles the response to the ListByResourceGroup request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) ListByResourceGroupResponder(resp *http.Response) (result ExpressRoutePortListResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listByResourceGroupNextResults retrieves the next set of results, if any.\nfunc (client ExpressRoutePortsClient) listByResourceGroupNextResults(ctx context.Context, lastResults ExpressRoutePortListResult) (result ExpressRoutePortListResult, err error) {\n\treq, err := lastResults.expressRoutePortListResultPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listByResourceGroupNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListByResourceGroupSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listByResourceGroupNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListByResourceGroupResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"listByResourceGroupNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client ExpressRoutePortsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result ExpressRoutePortListResultIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.ListByResourceGroup\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListByResourceGroup(ctx, resourceGroupName)\n\treturn\n}\n\n\/\/ UpdateTags update ExpressRoutePort tags\n\/\/ Parameters:\n\/\/ resourceGroupName - the name of the resource group.\n\/\/ expressRoutePortName - the name of the ExpressRoutePort resource.\n\/\/ parameters - parameters supplied to update ExpressRoutePort resource tags.\nfunc (client ExpressRoutePortsClient) UpdateTags(ctx context.Context, resourceGroupName string, expressRoutePortName string, parameters TagsObject) (result ExpressRoutePortsUpdateTagsFuture, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/ExpressRoutePortsClient.UpdateTags\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.FutureAPI != nil && result.FutureAPI.Response() != nil {\n\t\t\t\tsc = result.FutureAPI.Response().StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.UpdateTagsPreparer(ctx, resourceGroupName, expressRoutePortName, parameters)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"UpdateTags\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresult, err = client.UpdateTagsSender(req)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"network.ExpressRoutePortsClient\", \"UpdateTags\", result.Response(), \"Failure sending 
request\")\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ UpdateTagsPreparer prepares the UpdateTags request.\nfunc (client ExpressRoutePortsClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, expressRoutePortName string, parameters TagsObject) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"expressRoutePortName\": autorest.Encode(\"path\", expressRoutePortName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2018-10-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application\/json; charset=utf-8\"),\n\t\tautorest.AsPatch(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Network\/ExpressRoutePorts\/{expressRoutePortName}\", pathParameters),\n\t\tautorest.WithJSON(parameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ UpdateTagsSender sends the UpdateTags request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client ExpressRoutePortsClient) UpdateTagsSender(req *http.Request) (future ExpressRoutePortsUpdateTagsFuture, err error) {\n\tvar resp *http.Response\n\tfuture.FutureAPI = &azure.Future{}\n\tresp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))\n\tif err != nil {\n\t\treturn\n\t}\n\tvar azf azure.Future\n\tazf, err = azure.NewFutureFromResponse(resp)\n\tfuture.FutureAPI = &azf\n\tfuture.Result = future.result\n\treturn\n}\n\n\/\/ UpdateTagsResponder handles the response to the UpdateTags request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client ExpressRoutePortsClient) UpdateTagsResponder(resp *http.Response) (result ExpressRoutePort, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n","avg_line_length":39.9156626506,"max_line_length":217,"alphanum_fraction":0.7722823509} +{"size":3060,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\nCopyright 2016 The Kubernetes Authors.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n\n\/\/ Package triple generates key-certificate pairs for the\n\/\/ triple (CA, Server, Client).\npackage triple\n\nimport (\n\t\"crypto\/rsa\"\n\t\"crypto\/x509\"\n\t\"fmt\"\n\t\"net\"\n\n\tcertutil \"k8s.io\/client-go\/v6\/util\/cert\"\n)\n\ntype KeyPair struct {\n\tKey *rsa.PrivateKey\n\tCert *x509.Certificate\n}\n\nfunc NewCA(name string) (*KeyPair, error) {\n\tkey, err := certutil.NewPrivateKey()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create a private key for a new CA: %v\", err)\n\t}\n\n\tconfig := certutil.Config{\n\t\tCommonName: name,\n\t}\n\n\tcert, err := certutil.NewSelfSignedCACert(config, key)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create a self-signed certificate for a new CA: %v\", err)\n\t}\n\n\treturn &KeyPair{\n\t\tKey: key,\n\t\tCert: cert,\n\t}, nil\n}\n\nfunc NewServerKeyPair(ca *KeyPair, commonName, svcName, svcNamespace, dnsDomain string, ips, hostnames []string) (*KeyPair, error) {\n\tkey, err := certutil.NewPrivateKey()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create a server private key: %v\", err)\n\t}\n\n\tnamespacedName := fmt.Sprintf(\"%s.%s\", svcName, svcNamespace)\n\tinternalAPIServerFQDN := []string{\n\t\tsvcName,\n\t\tnamespacedName,\n\t\tfmt.Sprintf(\"%s.svc\", namespacedName),\n\t\tfmt.Sprintf(\"%s.svc.%s\", namespacedName, dnsDomain),\n\t}\n\n\taltNames := certutil.AltNames{}\n\tfor _, ipStr := range ips {\n\t\tip := net.ParseIP(ipStr)\n\t\tif ip != nil {\n\t\t\taltNames.IPs = append(altNames.IPs, ip)\n\t\t}\n\t}\n\taltNames.DNSNames = append(altNames.DNSNames, hostnames...)\n\taltNames.DNSNames = append(altNames.DNSNames, internalAPIServerFQDN...)\n\n\tconfig := certutil.Config{\n\t\tCommonName: commonName,\n\t\tAltNames: altNames,\n\t\tUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},\n\t}\n\tcert, err := certutil.NewSignedCert(config, key, ca.Cert, ca.Key)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to sign the server certificate: %v\", err)\n\t}\n\n\treturn &KeyPair{\n\t\tKey: key,\n\t\tCert: cert,\n\t}, nil\n}\n\nfunc NewClientKeyPair(ca *KeyPair, commonName string, organizations []string) (*KeyPair, error) {\n\tkey, err := certutil.NewPrivateKey()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create a client private key: %v\", err)\n\t}\n\n\tconfig := certutil.Config{\n\t\tCommonName: commonName,\n\t\tOrganization: 
organizations,\n\t\tUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth},\n\t}\n\tcert, err := certutil.NewSignedCert(config, key, ca.Cert, ca.Key)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to sign the client certificate: %v\", err)\n\t}\n\n\treturn &KeyPair{\n\t\tKey: key,\n\t\tCert: cert,\n\t}, nil\n}\n","avg_line_length":26.1538461538,"max_line_length":132,"alphanum_fraction":0.704248366} +{"size":625,"ext":"go","lang":"Go","max_stars_count":null,"content":"package cmd\n\nimport (\n\t\"fmt\"\n\n\t\"github.com\/logrusorgru\/aurora\"\n\t\"github.com\/spf13\/cobra\"\n)\n\nvar tagFormatStr = fmt.Sprintf(\"- %s (%s)\",\n\taurora.Blue(\"%s\"), aurora.Cyan(\"%d\"))\n\nvar tagsCmd = &cobra.Command{\n\tUse: \"tags\",\n\tShort: \"List all tags\",\n\tLong: `Lists all tags used in the library.`,\n\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\/\/ Load bookmarks from file\n\t\tbmks := ReadBookmarksFromFile()\n\n\t\t\/\/ Get tags\n\t\ttags := bmks.GetAllTags()\n\n\t\t\/\/ Print tags\n\t\tfor _, tag := range tags.Tags.Tags {\n\t\t\tfmt.Println(fmt.Sprintf(tagFormatStr, tag, tags.Count[tag]))\n\t\t}\n\t},\n}\n\nfunc init() {\n\trootCmd.AddCommand(tagsCmd)\n}\n","avg_line_length":18.3823529412,"max_line_length":63,"alphanum_fraction":0.648} +{"size":3423,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Code generated by private\/model\/cli\/gen-api\/main.go. DO NOT EDIT.\n\npackage iot\n\nimport (\n\t\"github.com\/sotowang\/aws-sdk-go\/aws\"\n\t\"github.com\/sotowang\/aws-sdk-go\/aws\/client\"\n\t\"github.com\/sotowang\/aws-sdk-go\/aws\/client\/metadata\"\n\t\"github.com\/sotowang\/aws-sdk-go\/aws\/request\"\n\t\"github.com\/sotowang\/aws-sdk-go\/aws\/signer\/v4\"\n\t\"github.com\/sotowang\/aws-sdk-go\/private\/protocol\"\n\t\"github.com\/sotowang\/aws-sdk-go\/private\/protocol\/restjson\"\n)\n\n\/\/ IoT provides the API operation methods for making requests to\n\/\/ AWS IoT. See this package's package overview docs\n\/\/ for details on the service.\n\/\/\n\/\/ IoT methods are safe to use concurrently. 
It is not safe to\n\/\/ modify mutate any of the struct's properties though.\ntype IoT struct {\n\t*client.Client\n}\n\n\/\/ Used for custom client initialization logic\nvar initClient func(*client.Client)\n\n\/\/ Used for custom request initialization logic\nvar initRequest func(*request.Request)\n\n\/\/ Service information constants\nconst (\n\tServiceName = \"iot\" \/\/ Name of service.\n\tEndpointsID = ServiceName \/\/ ID to lookup a service endpoint with.\n\tServiceID = \"IoT\" \/\/ ServiceID is a unique identifier of a specific service.\n)\n\n\/\/ New creates a new instance of the IoT client with a session.\n\/\/ If additional configuration is needed for the client instance use the optional\n\/\/ aws.Config parameter to add your extra config.\n\/\/\n\/\/ Example:\n\/\/ mySession := session.Must(session.NewSession())\n\/\/\n\/\/ \/\/ Create a IoT client from just a session.\n\/\/ svc := iot.New(mySession)\n\/\/\n\/\/ \/\/ Create a IoT client with additional configuration\n\/\/ svc := iot.New(mySession, aws.NewConfig().WithRegion(\"us-west-2\"))\nfunc New(p client.ConfigProvider, cfgs ...*aws.Config) *IoT {\n\tc := p.ClientConfig(EndpointsID, cfgs...)\n\tif c.SigningNameDerived || len(c.SigningName) == 0 {\n\t\tc.SigningName = \"execute-api\"\n\t}\n\treturn newClient(*c.Config, c.Handlers, c.PartitionID, c.Endpoint, c.SigningRegion, c.SigningName, c.ResolvedRegion)\n}\n\n\/\/ newClient creates, initializes and returns a new service client instance.\nfunc newClient(cfg aws.Config, handlers request.Handlers, partitionID, endpoint, signingRegion, signingName, resolvedRegion string) *IoT {\n\tsvc := &IoT{\n\t\tClient: client.New(\n\t\t\tcfg,\n\t\t\tmetadata.ClientInfo{\n\t\t\t\tServiceName: ServiceName,\n\t\t\t\tServiceID: ServiceID,\n\t\t\t\tSigningName: signingName,\n\t\t\t\tSigningRegion: signingRegion,\n\t\t\t\tPartitionID: partitionID,\n\t\t\t\tEndpoint: endpoint,\n\t\t\t\tAPIVersion: \"2015-05-28\",\n\t\t\t\tResolvedRegion: resolvedRegion,\n\t\t\t},\n\t\t\thandlers,\n\t\t),\n\t}\n\n\t\/\/ Handlers\n\tsvc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)\n\tsvc.Handlers.Build.PushBackNamed(restjson.BuildHandler)\n\tsvc.Handlers.Unmarshal.PushBackNamed(restjson.UnmarshalHandler)\n\tsvc.Handlers.UnmarshalMeta.PushBackNamed(restjson.UnmarshalMetaHandler)\n\tsvc.Handlers.UnmarshalError.PushBackNamed(\n\t\tprotocol.NewUnmarshalErrorHandler(restjson.NewUnmarshalTypedError(exceptionFromCode)).NamedHandler(),\n\t)\n\n\t\/\/ Run custom client initialization if present\n\tif initClient != nil {\n\t\tinitClient(svc.Client)\n\t}\n\n\treturn svc\n}\n\n\/\/ newRequest creates a new request for a IoT operation and runs any\n\/\/ custom request initialization.\nfunc (c *IoT) newRequest(op *request.Operation, params, data interface{}) *request.Request {\n\treq := c.NewRequest(op, params, data)\n\n\t\/\/ Run custom request initialization if present\n\tif initRequest != nil {\n\t\tinitRequest(req)\n\t}\n\n\treturn req\n}\n","avg_line_length":32.2924528302,"max_line_length":138,"alphanum_fraction":0.7353198948} +{"size":25388,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/\/ Copyright 2016 The G3N Authors. 
All rights reserved.\n\/\/ Use of this source code is governed by a BSD-style\n\/\/ license that can be found in the LICENSE file.\n\n\/\/ +build wasm\n\npackage gls\n\nimport (\n\t\"fmt\"\n\t\"syscall\/js\"\n\t\"unsafe\"\n)\n\n\/\/ GLS encapsulates the state of a WebGL context and contains\n\/\/ methods to call WebGL functions.\ntype GLS struct {\n\tstats Stats \/\/ statistics\n\tprog *Program \/\/ current active shader program\n\tprograms map[*Program]bool \/\/ shader programs cache\n\tcheckErrors bool \/\/ check openGL API errors flag\n\n\t\/\/ Cache WebGL state to avoid making unnecessary API calls\n\tactiveTexture uint32 \/\/ cached last set active texture unit\n\tviewportX int32 \/\/ cached last set viewport x\n\tviewportY int32 \/\/ cached last set viewport y\n\tviewportWidth int32 \/\/ cached last set viewport width\n\tviewportHeight int32 \/\/ cached last set viewport height\n\tlineWidth float32 \/\/ cached last set line width\n\tsideView int \/\/ cached last set triangle side view mode\n\tfrontFace uint32 \/\/ cached last set glFrontFace value\n\tdepthFunc uint32 \/\/ cached last set depth function\n\tdepthMask int \/\/ cached last set depth mask\n\tcapabilities map[int]int \/\/ cached capabilities (Enable\/Disable)\n\tblendEquation uint32 \/\/ cached last set blend equation value\n\tblendSrc uint32 \/\/ cached last set blend src value\n\tblendDst uint32 \/\/ cached last set blend equation destination value\n\tblendEquationRGB uint32 \/\/ cached last set blend equation rgb value\n\tblendEquationAlpha uint32 \/\/ cached last set blend equation alpha value\n\tblendSrcRGB uint32 \/\/ cached last set blend src rgb\n\tblendSrcAlpha uint32 \/\/ cached last set blend src alpha value\n\tblendDstRGB uint32 \/\/ cached last set blend destination rgb value\n\tblendDstAlpha uint32 \/\/ cached last set blend destination alpha value\n\tpolygonModeFace uint32 \/\/ cached last set polygon mode face\n\tpolygonModeMode uint32 \/\/ cached last set polygon mode mode\n\tpolygonOffsetFactor float32 \/\/ cached last set polygon offset factor\n\tpolygonOffsetUnits float32 \/\/ cached last set polygon offset units\n\n\t\/\/ js.Value storage maps\n\tprogramMap map[uint32]js.Value\n\tshaderMap map[uint32]js.Value\n\tbufferMap map[uint32]js.Value\n\tframebufferMap map[uint32]js.Value\n\trenderbufferMap map[uint32]js.Value\n\ttextureMap map[uint32]js.Value\n\tuniformMap map[uint32]js.Value\n\tvertexArrayMap map[uint32]js.Value\n\n\t\/\/ Next free index to be used for each map\n\tprogramMapIndex uint32\n\tshaderMapIndex uint32\n\tbufferMapIndex uint32\n\tframebufferMapIndex uint32\n\trenderbufferMapIndex uint32\n\ttextureMapIndex uint32\n\tuniformMapIndex uint32\n\tvertexArrayMapIndex uint32\n\n\t\/\/ Canvas and WebGL Context\n\tcanvas js.Value\n\tgl js.Value\n}\n\n\/\/ New creates and returns a new instance of a GLS object,\n\/\/ which encapsulates the state of an WebGL context.\n\/\/ This should be called only after an active WebGL context\n\/\/ is established, such as by creating a new window.\nfunc New(webglCtx js.Value) (*GLS, error) {\n\n\tgs := new(GLS)\n\tgs.reset()\n\tgs.checkErrors = false\n\tgs.gl = webglCtx\n\n\t\/\/ Create js.Value storage maps\n\tgs.programMap = make(map[uint32]js.Value)\n\tgs.shaderMap = make(map[uint32]js.Value)\n\tgs.bufferMap = make(map[uint32]js.Value)\n\tgs.framebufferMap = make(map[uint32]js.Value)\n\tgs.renderbufferMap = make(map[uint32]js.Value)\n\tgs.textureMap = make(map[uint32]js.Value)\n\tgs.uniformMap = make(map[uint32]js.Value)\n\tgs.vertexArrayMap = 
make(map[uint32]js.Value)\n\n\t\/\/ Initialize indexes to be used with the maps above\n\tgs.programMapIndex = 1\n\tgs.shaderMapIndex = 1\n\tgs.bufferMapIndex = 1\n\tgs.framebufferMapIndex = 1\n\tgs.renderbufferMapIndex = 1\n\tgs.textureMapIndex = 1\n\tgs.uniformMapIndex = 1\n\tgs.vertexArrayMapIndex = 1\n\n\tgs.setDefaultState()\n\treturn gs, nil\n}\n\n\/\/ SetCheckErrors enables\/disables checking for errors after the\n\/\/ call of any WebGL function. It is enabled by default but\n\/\/ could be disabled after an application is stable to improve the performance.\nfunc (gs *GLS) SetCheckErrors(enable bool) {\n\n\tgs.checkErrors = enable\n}\n\n\/\/ CheckErrors returns if error checking is enabled or not.\nfunc (gs *GLS) CheckErrors() bool {\n\n\treturn gs.checkErrors\n}\n\n\/\/ reset resets the internal state kept of the WebGL\nfunc (gs *GLS) reset() {\n\n\tgs.lineWidth = 0.0\n\tgs.sideView = uintUndef\n\tgs.frontFace = 0\n\tgs.depthFunc = 0\n\tgs.depthMask = uintUndef\n\tgs.capabilities = make(map[int]int)\n\tgs.programs = make(map[*Program]bool)\n\tgs.prog = nil\n\n\tgs.activeTexture = uintUndef\n\tgs.blendEquation = uintUndef\n\tgs.blendSrc = uintUndef\n\tgs.blendDst = uintUndef\n\tgs.blendEquationRGB = 0\n\tgs.blendEquationAlpha = 0\n\tgs.blendSrcRGB = uintUndef\n\tgs.blendSrcAlpha = uintUndef\n\tgs.blendDstRGB = uintUndef\n\tgs.blendDstAlpha = uintUndef\n\tgs.polygonModeFace = 0\n\tgs.polygonModeMode = 0\n\tgs.polygonOffsetFactor = -1\n\tgs.polygonOffsetUnits = -1\n}\n\n\/\/ setDefaultState is used internally to set the initial state of WebGL\n\/\/ for this context.\nfunc (gs *GLS) setDefaultState() {\n\n\tgs.ClearColor(0, 0, 0, 1)\n\tgs.ClearDepth(1)\n\tgs.ClearStencil(0)\n\tgs.Enable(DEPTH_TEST)\n\tgs.DepthFunc(LEQUAL)\n\tgs.FrontFace(CCW)\n\tgs.CullFace(BACK)\n\tgs.Enable(CULL_FACE)\n\tgs.Enable(BLEND)\n\tgs.BlendEquation(FUNC_ADD)\n\tgs.BlendFunc(SRC_ALPHA, ONE_MINUS_SRC_ALPHA)\n\n\t\/\/ TODO commented constants not available in WebGL\n\t\/\/gs.Enable(VERTEX_PROGRAM_POINT_SIZE)\n\t\/\/gs.Enable(PROGRAM_POINT_SIZE)\n\t\/\/gs.Enable(MULTISAMPLE)\n\tgs.Enable(POLYGON_OFFSET_FILL)\n\t\/\/gs.Enable(POLYGON_OFFSET_LINE)\n\t\/\/gs.Enable(POLYGON_OFFSET_POINT)\n}\n\n\/\/ Stats copy the current values of the internal statistics structure\n\/\/ to the specified pointer.\nfunc (gs *GLS) Stats(s *Stats) {\n\n\t*s = gs.stats\n\ts.Shaders = len(gs.programs)\n}\n\n\/\/ ActiveTexture selects which texture unit subsequent texture state calls\n\/\/ will affect. 
The number of texture units an implementation supports is\n\/\/ implementation dependent, but must be at least 48 in GL 3.3.\nfunc (gs *GLS) ActiveTexture(texture uint32) {\n\n\tif gs.activeTexture == texture {\n\t\treturn\n\t}\n\tgs.gl.Call(\"activeTexture\", int(texture))\n\tgs.checkError(\"ActiveTexture\")\n\tgs.activeTexture = texture\n}\n\n\/\/ AttachShader attaches the specified shader object to the specified program object.\nfunc (gs *GLS) AttachShader(program, shader uint32) {\n\n\tgs.gl.Call(\"attachShader\", gs.programMap[program], gs.shaderMap[shader])\n\tgs.checkError(\"AttachShader\")\n}\n\n\/\/ BindBuffer binds a buffer object to the specified buffer binding point.\nfunc (gs *GLS) BindBuffer(target int, vbo uint32) {\n\n\tgs.gl.Call(\"bindBuffer\", target, gs.bufferMap[vbo])\n\tgs.checkError(\"BindBuffer\")\n}\n\n\/\/ BindTexture lets you create or use a named texture.\nfunc (gs *GLS) BindTexture(target int, tex uint32) {\n\n\tgs.gl.Call(\"bindTexture\", target, gs.textureMap[tex])\n\tgs.checkError(\"BindTexture\")\n}\n\n\/\/ BindVertexArray binds the vertex array object.\nfunc (gs *GLS) BindVertexArray(vao uint32) {\n\n\tgs.gl.Call(\"bindVertexArray\", gs.vertexArrayMap[vao])\n\tgs.checkError(\"BindVertexArray\")\n}\n\n\/\/ BlendEquation sets the blend equations for all draw buffers.\nfunc (gs *GLS) BlendEquation(mode uint32) {\n\n\tif gs.blendEquation == mode {\n\t\treturn\n\t}\n\tgs.gl.Call(\"blendEquation\", int(mode))\n\tgs.checkError(\"BlendEquation\")\n\tgs.blendEquation = mode\n}\n\n\/\/ BlendEquationSeparate sets the blend equations for all draw buffers\n\/\/ allowing different equations for the RGB and alpha components.\nfunc (gs *GLS) BlendEquationSeparate(modeRGB uint32, modeAlpha uint32) {\n\n\tif gs.blendEquationRGB == modeRGB && gs.blendEquationAlpha == modeAlpha {\n\t\treturn\n\t}\n\tgs.gl.Call(\"blendEquationSeparate\", int(modeRGB), int(modeAlpha))\n\tgs.checkError(\"BlendEquationSeparate\")\n\tgs.blendEquationRGB = modeRGB\n\tgs.blendEquationAlpha = modeAlpha\n}\n\n\/\/ BlendFunc defines the operation of blending for\n\/\/ all draw buffers when blending is enabled.\nfunc (gs *GLS) BlendFunc(sfactor, dfactor uint32) {\n\n\tif gs.blendSrc == sfactor && gs.blendDst == dfactor {\n\t\treturn\n\t}\n\tgs.gl.Call(\"blendFunc\", int(sfactor), int(dfactor))\n\tgs.checkError(\"BlendFunc\")\n\tgs.blendSrc = sfactor\n\tgs.blendDst = dfactor\n}\n\n\/\/ BlendFuncSeparate defines the operation of blending for all draw buffers when blending\n\/\/ is enabled, allowing different operations for the RGB and alpha components.\nfunc (gs *GLS) BlendFuncSeparate(srcRGB uint32, dstRGB uint32, srcAlpha uint32, dstAlpha uint32) {\n\n\tif gs.blendSrcRGB == srcRGB && gs.blendDstRGB == dstRGB &&\n\t\tgs.blendSrcAlpha == srcAlpha && gs.blendDstAlpha == dstAlpha {\n\t\treturn\n\t}\n\tgs.gl.Call(\"blendFuncSeparate\", int(srcRGB), int(dstRGB), int(srcAlpha), int(dstAlpha))\n\tgs.checkError(\"BlendFuncSeparate\")\n\tgs.blendSrcRGB = srcRGB\n\tgs.blendDstRGB = dstRGB\n\tgs.blendSrcAlpha = srcAlpha\n\tgs.blendDstAlpha = dstAlpha\n}\n\n\/\/ BufferData creates a new data store for the buffer object currently\n\/\/ bound to target, deleting any pre-existing data store.\nfunc (gs *GLS) BufferData(target uint32, size int, data interface{}, usage uint32) {\n\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"bufferData\", int(target), dataTA, int(usage))\n\tgs.checkError(\"BufferData\")\n\tdataTA.Release()\n}\n\n\/\/ ClearColor specifies the red, green, blue, and alpha values\n\/\/ used by glClear 
to clear the color buffers.\nfunc (gs *GLS) ClearColor(r, g, b, a float32) {\n\n\tgs.gl.Call(\"clearColor\", r, g, b, a)\n\tgs.checkError(\"ClearColor\")\n}\n\n\/\/ ClearDepth specifies the depth value used by Clear to clear the depth buffer.\nfunc (gs *GLS) ClearDepth(v float32) {\n\n\tgs.gl.Call(\"clearDepth\", v)\n\tgs.checkError(\"ClearDepth\")\n}\n\n\/\/ ClearStencil specifies the index used by Clear to clear the stencil buffer.\nfunc (gs *GLS) ClearStencil(v int32) {\n\n\tgs.gl.Call(\"clearStencil\", int(v))\n\tgs.checkError(\"ClearStencil\")\n}\n\n\/\/ Clear sets the bitplane area of the window to values previously\n\/\/ selected by ClearColor, ClearDepth, and ClearStencil.\nfunc (gs *GLS) Clear(mask uint) {\n\n\tgs.gl.Call(\"clear\", int(mask))\n\tgs.checkError(\"Clear\")\n}\n\n\/\/ CompileShader compiles the source code strings that\n\/\/ have been stored in the specified shader object.\nfunc (gs *GLS) CompileShader(shader uint32) {\n\n\tgs.gl.Call(\"compileShader\", gs.shaderMap[shader])\n\tgs.checkError(\"CompileShader\")\n}\n\n\/\/ CreateProgram creates an empty program object and returns\n\/\/ a non-zero value by which it can be referenced.\nfunc (gs *GLS) CreateProgram() uint32 {\n\n\tgs.programMap[gs.programMapIndex] = gs.gl.Call(\"createProgram\")\n\tgs.checkError(\"CreateProgram\")\n\tidx := gs.programMapIndex\n\tgs.programMapIndex++\n\treturn idx\n}\n\n\/\/ CreateShader creates an empty shader object and returns\n\/\/ a non-zero value by which it can be referenced.\nfunc (gs *GLS) CreateShader(stype uint32) uint32 {\n\n\tgs.shaderMap[gs.shaderMapIndex] = gs.gl.Call(\"createShader\", int(stype))\n\tgs.checkError(\"CreateShader\")\n\tidx := gs.shaderMapIndex\n\tgs.shaderMapIndex++\n\treturn idx\n}\n\n\/\/ DeleteBuffers deletes n\u200bbuffer objects named\n\/\/ by the elements of the provided array.\nfunc (gs *GLS) DeleteBuffers(bufs ...uint32) {\n\n\tfor _, buf := range bufs {\n\t\tgs.gl.Call(\"deleteBuffer\", gs.bufferMap[buf])\n\t\tgs.checkError(\"DeleteBuffers\")\n\t\tgs.stats.Buffers--\n\t\tdelete(gs.bufferMap, buf)\n\t}\n}\n\n\/\/ DeleteShader frees the memory and invalidates the name\n\/\/ associated with the specified shader object.\nfunc (gs *GLS) DeleteShader(shader uint32) {\n\n\tgs.gl.Call(\"deleteShader\", gs.shaderMap[shader])\n\tgs.checkError(\"DeleteShader\")\n\tdelete(gs.shaderMap, shader)\n}\n\n\/\/ DeleteProgram frees the memory and invalidates the name\n\/\/ associated with the specified program object.\nfunc (gs *GLS) DeleteProgram(program uint32) {\n\n\tgs.gl.Call(\"deleteProgram\", gs.programMap[program])\n\tgs.checkError(\"DeleteProgram\")\n\tdelete(gs.programMap, program)\n}\n\n\/\/ DeleteTextures deletes n\u200btextures named\n\/\/ by the elements of the provided array.\nfunc (gs *GLS) DeleteTextures(tex ...uint32) {\n\n\tfor _, t := range tex {\n\t\tgs.gl.Call(\"deleteTexture\", gs.textureMap[t])\n\t\tgs.checkError(\"DeleteTextures\")\n\t\tdelete(gs.textureMap, t)\n\t\tgs.stats.Textures--\n\t}\n}\n\n\/\/ DeleteVertexArrays deletes n\u200bvertex array objects named\n\/\/ by the elements of the provided array.\nfunc (gs *GLS) DeleteVertexArrays(vaos ...uint32) {\n\n\tfor _, v := range vaos {\n\t\tgs.gl.Call(\"deleteVertexArray\", gs.vertexArrayMap[v])\n\t\tgs.checkError(\"DeleteVertexArrays\")\n\t\tdelete(gs.vertexArrayMap, v)\n\t\tgs.stats.Vaos--\n\t}\n}\n\n\/\/ TODO ReadPixels\n\n\/\/ DepthFunc specifies the function used to compare each incoming pixel\n\/\/ depth value with the depth value present in the depth buffer.\nfunc (gs *GLS) 
DepthFunc(mode uint32) {\n\n\tif gs.depthFunc == mode {\n\t\treturn\n\t}\n\tgs.gl.Call(\"depthFunc\", int(mode))\n\tgs.checkError(\"DepthFunc\")\n\tgs.depthFunc = mode\n}\n\n\/\/ DepthMask enables or disables writing into the depth buffer.\nfunc (gs *GLS) DepthMask(flag bool) {\n\n\tif gs.depthMask == intTrue && flag {\n\t\treturn\n\t}\n\tif gs.depthMask == intFalse && !flag {\n\t\treturn\n\t}\n\tgs.gl.Call(\"depthMask\", flag)\n\tgs.checkError(\"DepthMask\")\n\tif flag {\n\t\tgs.depthMask = intTrue\n\t} else {\n\t\tgs.depthMask = intFalse\n\t}\n}\n\n\/\/ DrawArrays renders primitives from array data.\nfunc (gs *GLS) DrawArrays(mode uint32, first int32, count int32) {\n\n\tgs.gl.Call(\"drawArrays\", int(mode), first, count)\n\tgs.checkError(\"DrawArrays\")\n\tgs.stats.Drawcalls++\n}\n\n\/\/ DrawElements renders primitives from array data.\nfunc (gs *GLS) DrawElements(mode uint32, count int32, itype uint32, start uint32) {\n\n\tgs.gl.Call(\"drawElements\", int(mode), count, int(itype), start)\n\tgs.checkError(\"DrawElements\")\n\tgs.stats.Drawcalls++\n}\n\n\/\/ Enable enables the specified capability.\nfunc (gs *GLS) Enable(cap int) {\n\n\tif gs.capabilities[cap] == capEnabled {\n\t\tgs.stats.Caphits++\n\t\treturn\n\t}\n\tgs.gl.Call(\"enable\", int32(cap))\n\tgs.checkError(\"Enable\")\n\tgs.capabilities[cap] = capEnabled\n}\n\n\/\/ Disable disables the specified capability.\nfunc (gs *GLS) Disable(cap int) {\n\n\tif gs.capabilities[cap] == capDisabled {\n\t\tgs.stats.Caphits++\n\t\treturn\n\t}\n\tgs.gl.Call(\"disable\", cap)\n\tgs.checkError(\"Disable\")\n\tgs.capabilities[cap] = capDisabled\n}\n\n\/\/ EnableVertexAttribArray enables a generic vertex attribute array.\nfunc (gs *GLS) EnableVertexAttribArray(index uint32) {\n\n\tgs.gl.Call(\"enableVertexAttribArray\", index)\n\tgs.checkError(\"EnableVertexAttribArray\")\n}\n\n\/\/ CullFace specifies whether front- or back-facing facets can be culled.\nfunc (gs *GLS) CullFace(mode uint32) {\n\n\tgs.gl.Call(\"cullFace\", int(mode))\n\tgs.checkError(\"CullFace\")\n}\n\n\/\/ FrontFace defines front- and back-facing polygons.\nfunc (gs *GLS) FrontFace(mode uint32) {\n\n\tif gs.frontFace == mode {\n\t\treturn\n\t}\n\tgs.gl.Call(\"frontFace\", int(mode))\n\tgs.checkError(\"FrontFace\")\n\tgs.frontFace = mode\n}\n\n\/\/ GenBuffer generates a \u200bbuffer object name.\nfunc (gs *GLS) GenBuffer() uint32 {\n\n\tgs.bufferMap[gs.bufferMapIndex] = gs.gl.Call(\"createBuffer\")\n\tgs.checkError(\"CreateBuffer\")\n\tidx := gs.bufferMapIndex\n\tgs.bufferMapIndex++\n\tgs.stats.Buffers++\n\treturn idx\n}\n\n\/\/ GenerateMipmap generates mipmaps for the specified texture target.\nfunc (gs *GLS) GenerateMipmap(target uint32) {\n\n\tgs.gl.Call(\"generateMipmap\", int(target))\n\tgs.checkError(\"GenerateMipmap\")\n}\n\n\/\/ GenTexture generates a texture object name.\nfunc (gs *GLS) GenTexture() uint32 {\n\n\tgs.textureMap[gs.textureMapIndex] = gs.gl.Call(\"createTexture\")\n\tgs.checkError(\"GenTexture\")\n\tidx := gs.textureMapIndex\n\tgs.textureMapIndex++\n\tgs.stats.Textures++\n\treturn idx\n}\n\n\/\/ GenVertexArray generates a vertex array object name.\nfunc (gs *GLS) GenVertexArray() uint32 {\n\n\tgs.vertexArrayMap[gs.vertexArrayMapIndex] = gs.gl.Call(\"createVertexArray\")\n\tgs.checkError(\"GenVertexArray\")\n\tidx := gs.vertexArrayMapIndex\n\tgs.vertexArrayMapIndex++\n\tgs.stats.Vaos++\n\treturn idx\n}\n\n\/\/ GetAttribLocation returns the location of the specified attribute variable.\nfunc (gs *GLS) GetAttribLocation(program uint32, name string) int32 
{\n\n\tloc := gs.gl.Call(\"getAttribLocation\", gs.programMap[program], name).Int()\n\tgs.checkError(\"GetAttribLocation\")\n\treturn int32(loc)\n}\n\n\/\/ GetProgramiv returns the specified parameter from the specified program object.\nfunc (gs *GLS) GetProgramiv(program, pname uint32, params *int32) {\n\n\tsparam := gs.gl.Call(\"getProgramParameter\", gs.programMap[program], int(pname))\n\tgs.checkError(\"GetProgramiv\")\n\tswitch pname {\n\tcase DELETE_STATUS, LINK_STATUS, VALIDATE_STATUS:\n\t\tif sparam.Bool() {\n\t\t\t*params = TRUE\n\t\t} else {\n\t\t\t*params = FALSE\n\t\t}\n\tdefault:\n\t\t*params = int32(sparam.Int())\n\t}\n}\n\n\/\/ GetProgramInfoLog returns the information log for the specified program object.\nfunc (gs *GLS) GetProgramInfoLog(program uint32) string {\n\n\tres := gs.gl.Call(\"getProgramInfoLog\", gs.programMap[program]).String()\n\tgs.checkError(\"GetProgramInfoLog\")\n\treturn res\n}\n\n\/\/ GetShaderInfoLog returns the information log for the specified shader object.\nfunc (gs *GLS) GetShaderInfoLog(shader uint32) string {\n\n\tres := gs.gl.Call(\"getShaderInfoLog\", gs.shaderMap[shader]).String()\n\tgs.checkError(\"GetShaderInfoLog\")\n\treturn res\n}\n\n\/\/ GetString returns a string describing the specified aspect of the current GL connection.\nfunc (gs *GLS) GetString(name uint32) string {\n\n\tres := gs.gl.Call(\"getParameter\", int(name)).String()\n\tgs.checkError(\"GetString\")\n\treturn res\n}\n\n\/\/ GetUniformLocation returns the location of a uniform variable for the specified program.\nfunc (gs *GLS) GetUniformLocation(program uint32, name string) int32 {\n\n\tloc := gs.gl.Call(\"getUniformLocation\", gs.programMap[program], name)\n\tif loc == js.Null() {\n\t\treturn -1\n\t}\n\tgs.uniformMap[gs.uniformMapIndex] = loc\n\tgs.checkError(\"GetUniformLocation\")\n\tidx := gs.uniformMapIndex\n\tgs.uniformMapIndex++\n\treturn int32(idx)\n}\n\n\/\/ GetViewport returns the current viewport information.\nfunc (gs *GLS) GetViewport() (x, y, width, height int32) {\n\n\treturn gs.viewportX, gs.viewportY, gs.viewportWidth, gs.viewportHeight\n}\n\n\/\/ LineWidth specifies the rasterized width of both aliased and antialiased lines.\nfunc (gs *GLS) LineWidth(width float32) {\n\n\tif gs.lineWidth == width {\n\t\treturn\n\t}\n\tgs.gl.Call(\"lineWidth\", width)\n\tgs.checkError(\"LineWidth\")\n\tgs.lineWidth = width\n}\n\n\/\/ LinkProgram links the specified program object.\nfunc (gs *GLS) LinkProgram(program uint32) {\n\n\tgs.gl.Call(\"linkProgram\", gs.programMap[program])\n\tgs.checkError(\"LinkProgram\")\n}\n\n\/\/ GetShaderiv returns the specified parameter from the specified shader object.\nfunc (gs *GLS) GetShaderiv(shader, pname uint32, params *int32) {\n\n\tsparam := gs.gl.Call(\"getShaderParameter\", gs.shaderMap[shader], int(pname))\n\tgs.checkError(\"GetShaderiv\")\n\tswitch pname {\n\tcase DELETE_STATUS, COMPILE_STATUS:\n\t\tif sparam.Bool() {\n\t\t\t*params = TRUE\n\t\t} else {\n\t\t\t*params = FALSE\n\t\t}\n\tdefault:\n\t\t*params = int32(sparam.Int())\n\t}\n}\n\n\/\/ Scissor defines the scissor box rectangle in window coordinates.\nfunc (gs *GLS) Scissor(x, y int32, width, height uint32) {\n\n\tgs.gl.Call(\"scissor\", x, y, int(width), int(height))\n\tgs.checkError(\"Scissor\")\n}\n\n\/\/ ShaderSource sets the source code for the specified shader object.\nfunc (gs *GLS) ShaderSource(shader uint32, src string) {\n\n\tgs.gl.Call(\"shaderSource\", gs.shaderMap[shader], src)\n\tgs.checkError(\"ShaderSource\")\n}\n\n\/\/ TexImage2D specifies a 
two-dimensional texture image.\nfunc (gs *GLS) TexImage2D(target uint32, level int32, iformat int32, width int32, height int32, format uint32, itype uint32, data interface{}) {\n\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"texImage2D\", int(target), level, iformat, width, height, 0, int(format), int(itype), dataTA)\n\tgs.checkError(\"TexImage2D\")\n\tdataTA.Release()\n}\n\n\/\/ TexParameteri sets the specified texture parameter on the specified texture.\nfunc (gs *GLS) TexParameteri(target uint32, pname uint32, param int32) {\n\n\tgs.gl.Call(\"texParameteri\", int(target), int(pname), param)\n\tgs.checkError(\"TexParameteri\")\n}\n\n\/\/ PolygonMode controls the interpretation of polygons for rasterization.\nfunc (gs *GLS) PolygonMode(face, mode uint32) {\n\n\tlog.Warn(\"PolygonMode not available in WebGL\")\n}\n\n\/\/ PolygonOffset sets the scale and units used to calculate depth values.\nfunc (gs *GLS) PolygonOffset(factor float32, units float32) {\n\n\tif gs.polygonOffsetFactor == factor && gs.polygonOffsetUnits == units {\n\t\treturn\n\t}\n\tgs.gl.Call(\"polygonOffset\", factor, units)\n\tgs.checkError(\"PolygonOffset\")\n\tgs.polygonOffsetFactor = factor\n\tgs.polygonOffsetUnits = units\n}\n\n\/\/ Uniform1i sets the value of an int uniform variable for the current program object.\nfunc (gs *GLS) Uniform1i(location int32, v0 int32) {\n\n\tgs.gl.Call(\"uniform1i\", gs.uniformMap[uint32(location)], v0)\n\tgs.checkError(\"Uniform1i\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform1f sets the value of a float uniform variable for the current program object.\nfunc (gs *GLS) Uniform1f(location int32, v0 float32) {\n\n\tgs.gl.Call(\"uniform1f\", gs.uniformMap[uint32(location)], v0)\n\tgs.checkError(\"Uniform1f\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform2f sets the value of a vec2 uniform variable for the current program object.\nfunc (gs *GLS) Uniform2f(location int32, v0, v1 float32) {\n\n\tgs.gl.Call(\"uniform2f\", gs.uniformMap[uint32(location)], v0, v1)\n\tgs.checkError(\"Uniform2f\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform3f sets the value of a vec3 uniform variable for the current program object.\nfunc (gs *GLS) Uniform3f(location int32, v0, v1, v2 float32) {\n\n\tgs.gl.Call(\"uniform3f\", gs.uniformMap[uint32(location)], v0, v1, v2)\n\tgs.checkError(\"Uniform3f\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform4f sets the value of a vec4 uniform variable for the current program object.\nfunc (gs *GLS) Uniform4f(location int32, v0, v1, v2, v3 float32) {\n\n\tgs.gl.Call(\"uniform4f\", gs.uniformMap[uint32(location)], v0, v1, v2, v3)\n\tgs.checkError(\"Uniform4f\")\n\tgs.stats.Unisets++\n}\n\n\/\/\/\/ UniformMatrix3fv sets the value of one or many 3x3 float matrices for the current program object.\nfunc (gs *GLS) UniformMatrix3fv(location int32, count int32, transpose bool, pm *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(pm))[:9*count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniformMatrix3fv\", gs.uniformMap[uint32(location)], transpose, dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"UniformMatrix3fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ UniformMatrix4fv sets the value of one or many 4x4 float matrices for the current program object.\nfunc (gs *GLS) UniformMatrix4fv(location int32, count int32, transpose bool, pm *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(pm))[:16*count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniformMatrix4fv\", gs.uniformMap[uint32(location)], transpose, 
dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"UniformMatrix4fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform1fv sets the value of one or many float uniform variables for the current program object.\nfunc (gs *GLS) Uniform1fv(location int32, count int32, v *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(v))[:count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniform1fv\", gs.uniformMap[uint32(location)], dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"Uniform1fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform2fv sets the value of one or many vec2 uniform variables for the current program object.\nfunc (gs *GLS) Uniform2fv(location int32, count int32, v *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(v))[:2*count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniform2fv\", gs.uniformMap[uint32(location)], dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"Uniform2fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform3fv sets the value of one or many vec3 uniform variables for the current program object.\nfunc (gs *GLS) Uniform3fv(location int32, count int32, v *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(v))[:3*count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniform3fv\", gs.uniformMap[uint32(location)], dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"Uniform3fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ Uniform4fv sets the value of one or many vec4 uniform variables for the current program object.\nfunc (gs *GLS) Uniform4fv(location int32, count int32, v *float32) {\n\n\tdata := (*[1 << 30]float32)(unsafe.Pointer(v))[:4*count]\n\tdataTA := js.TypedArrayOf(data)\n\tgs.gl.Call(\"uniform4fv\", gs.uniformMap[uint32(location)], dataTA)\n\tdataTA.Release()\n\tgs.checkError(\"Uniform4fv\")\n\tgs.stats.Unisets++\n}\n\n\/\/ VertexAttribPointer defines an array of generic vertex attribute data.\nfunc (gs *GLS) VertexAttribPointer(index uint32, size int32, xtype uint32, normalized bool, stride int32, offset uint32) {\n\n\tgs.gl.Call(\"vertexAttribPointer\", index, size, int(xtype), normalized, stride, offset)\n\tgs.checkError(\"VertexAttribPointer\")\n}\n\n\/\/ Viewport sets the viewport.\nfunc (gs *GLS) Viewport(x, y, width, height int32) {\n\n\tgs.gl.Call(\"viewport\", x, y, width, height)\n\tgs.checkError(\"Viewport\")\n\tgs.viewportX = x\n\tgs.viewportY = y\n\tgs.viewportWidth = width\n\tgs.viewportHeight = height\n}\n\n\/\/ UseProgram sets the specified program as the current program.\nfunc (gs *GLS) UseProgram(prog *Program) {\n\n\tif prog.handle == 0 {\n\t\tpanic(\"Invalid program\")\n\t}\n\n\tgs.gl.Call(\"useProgram\", gs.programMap[prog.handle])\n\tgs.checkError(\"UseProgram\")\n\tgs.prog = prog\n\n\t\/\/ Inserts program in cache if not already there.\n\tif !gs.programs[prog] {\n\t\tgs.programs[prog] = true\n\t\tlog.Debug(\"New Program activated. 
Total: %d\", len(gs.programs))\n\t}\n}\n\n\/\/ checkError checks if there are any WebGL errors and panics if so.\nfunc (gs *GLS) checkError(name string) {\n\n\tif !gs.checkErrors {\n\t\treturn\n\t}\n\terr := gs.gl.Call(\"getError\")\n\tif err.Int() != NO_ERROR {\n\t\tpanic(fmt.Sprintf(\"%s error: %v\", name, err))\n\t}\n}\n","avg_line_length":30.0805687204,"max_line_length":144,"alphanum_fraction":0.7281392784} +{"size":1158,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"os\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com\/blocksafe\/go\/keypair\"\n)\n\nvar prefix string\n\nconst alphabet = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567\"\n\nfunc main() {\n\n\tif len(os.Args) != 2 {\n\t\tusage()\n\t\tos.Exit(1)\n\t}\n\n\tprefix = strings.ToUpper(os.Args[1])\n\tcheckPlausible()\n\n\tfor {\n\t\tkp, err := keypair.Random()\n\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\t\/\/ NOTE: the first letter of an address will always be G, and the second letter will be one of only a few\n\t\t\/\/ possibilities in the base32 alphabet, so we are actually searching for the vanity value after this 2\n\t\t\/\/ character prefix.\n\t\tif strings.HasPrefix(kp.Address()[2:], prefix) {\n\t\t\tfmt.Println(\"Found!\")\n\t\t\tfmt.Printf(\"Secret seed: %s\\n\", kp.Seed())\n\t\t\tfmt.Printf(\"Public: %s\\n\", kp.Address())\n\t\t\tos.Exit(0)\n\t\t}\n\t}\n}\n\nfunc usage() {\n\tfmt.Printf(\"Usage:\\n\\tblocksafe-vanity-gen PREFIX\\n\")\n}\n\n\/\/ aborts the attempt if a desired character is not a valid base32 digit\nfunc checkPlausible() {\n\tfor _, r := range prefix {\n\t\tif !strings.ContainsRune(alphabet, r) {\n\t\t\tfmt.Printf(\"Invalid prefix: %s is not in the base32 alphabet\\n\", strconv.QuoteRune(r))\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n}\n","avg_line_length":19.6271186441,"max_line_length":107,"alphanum_fraction":0.6632124352} +{"size":14822,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright 2021 PingCAP, Inc.\n\/\/ Copyright 2015 CoreOS, Inc.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage writer\n\nimport (\n\t\"context\"\n\t\"encoding\/binary\"\n\t\"fmt\"\n\t\"io\"\n\t\"io\/ioutil\"\n\t\"net\/url\"\n\t\"os\"\n\t\"path\/filepath\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com\/pingcap\/errors\"\n\t\"github.com\/pingcap\/log\"\n\t\"github.com\/pingcap\/tidb\/br\/pkg\/storage\"\n\t\"github.com\/pingcap\/tiflow\/cdc\/model\"\n\t\"github.com\/prometheus\/client_golang\/prometheus\"\n\t\"github.com\/uber-go\/atomic\"\n\tpioutil \"go.etcd.io\/etcd\/pkg\/v3\/ioutil\"\n\t\"go.uber.org\/multierr\"\n\t\"go.uber.org\/zap\"\n\n\t\"github.com\/pingcap\/tiflow\/cdc\/redo\/common\"\n\tcerror \"github.com\/pingcap\/tiflow\/pkg\/errors\"\n)\n\nconst (\n\t\/\/ pageBytes is the alignment for flushing records to the backing Writer.\n\t\/\/ It should be a multiple of the minimum sector size so that log can safely\n\t\/\/ distinguish between torn writes and ordinary data corruption.\n\tpageBytes = 8 * common.MinSectorSize\n)\n\nconst (\n\tdefaultFlushIntervalInMs = 
1000\n\tdefaultS3Timeout = 3 * time.Second\n)\n\nvar (\n\t\/\/ for easy testing, not set to const\n\tmegabyte int64 = 1024 * 1024\n\tdefaultMaxLogSize = 64 * megabyte\n)\n\n\/\/go:generate mockery --name=fileWriter --inpackage\ntype fileWriter interface {\n\tio.WriteCloser\n\tflusher\n\n\t\/\/ AdvanceTs receive the commitTs in the event from caller\n\tAdvanceTs(commitTs uint64)\n\t\/\/ GC run gc to remove useless files base on the checkPointTs\n\tGC(checkPointTs uint64) error\n\t\/\/ IsRunning check the fileWriter status\n\tIsRunning() bool\n}\n\ntype flusher interface {\n\tFlush() error\n}\n\n\/\/ FileWriterConfig is the configuration used by a Writer.\ntype FileWriterConfig struct {\n\tDir string\n\tChangeFeedID model.ChangeFeedID\n\tCaptureID string\n\tFileType string\n\tCreateTime time.Time\n\t\/\/ MaxLogSize is the maximum size of log in megabyte, defaults to defaultMaxLogSize.\n\tMaxLogSize int64\n\tFlushIntervalInMs int64\n\tS3Storage bool\n\tS3URI url.URL\n}\n\n\/\/ Option define the writerOptions\ntype Option func(writer *writerOptions)\n\ntype writerOptions struct {\n\tgetLogFileName func() string\n}\n\n\/\/ WithLogFileName provide the Option for fileName\nfunc WithLogFileName(f func() string) Option {\n\treturn func(o *writerOptions) {\n\t\tif f != nil {\n\t\t\to.getLogFileName = f\n\t\t}\n\t}\n}\n\n\/\/ Writer is a redo log event Writer which writes redo log events to a file.\ntype Writer struct {\n\tcfg *FileWriterConfig\n\top *writerOptions\n\t\/\/ maxCommitTS is the max commitTS among the events in one log file\n\tmaxCommitTS atomic.Uint64\n\t\/\/ the ts used in file name\n\tcommitTS atomic.Uint64\n\t\/\/ the ts send with the event\n\teventCommitTS atomic.Uint64\n\trunning atomic.Bool\n\tgcRunning atomic.Bool\n\tsize int64\n\tfile *os.File\n\tbw *pioutil.PageWriter\n\tuint64buf []byte\n\tstorage storage.ExternalStorage\n\tsync.RWMutex\n\n\tmetricFsyncDuration prometheus.Observer\n\tmetricFlushAllDuration prometheus.Observer\n\tmetricWriteBytes prometheus.Gauge\n}\n\n\/\/ NewWriter return a file rotated writer, TODO: extract to a common rotate Writer\nfunc NewWriter(ctx context.Context, cfg *FileWriterConfig, opts ...Option) (*Writer, error) {\n\tif cfg == nil {\n\t\treturn nil, cerror.WrapError(cerror.ErrRedoConfigInvalid, errors.New(\"FileWriterConfig can not be nil\"))\n\t}\n\n\tif cfg.FlushIntervalInMs == 0 {\n\t\tcfg.FlushIntervalInMs = defaultFlushIntervalInMs\n\t}\n\tcfg.MaxLogSize *= megabyte\n\tif cfg.MaxLogSize == 0 {\n\t\tcfg.MaxLogSize = defaultMaxLogSize\n\t}\n\tvar s3storage storage.ExternalStorage\n\tif cfg.S3Storage {\n\t\tvar err error\n\t\ts3storage, err = common.InitS3storage(ctx, cfg.S3URI)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\top := &writerOptions{}\n\tfor _, opt := range opts {\n\t\topt(op)\n\t}\n\tw := &Writer{\n\t\tcfg: cfg,\n\t\top: op,\n\t\tuint64buf: make([]byte, 8),\n\t\tstorage: s3storage,\n\n\t\tmetricFsyncDuration: redoFsyncDurationHistogram.\n\t\t\tWithLabelValues(cfg.ChangeFeedID.Namespace, cfg.ChangeFeedID.ID),\n\t\tmetricFlushAllDuration: redoFlushAllDurationHistogram.\n\t\t\tWithLabelValues(cfg.ChangeFeedID.Namespace, cfg.ChangeFeedID.ID),\n\t\tmetricWriteBytes: redoWriteBytesGauge.\n\t\t\tWithLabelValues(cfg.ChangeFeedID.Namespace, cfg.ChangeFeedID.ID),\n\t}\n\n\tw.running.Store(true)\n\tgo w.runFlushToDisk(ctx, cfg.FlushIntervalInMs)\n\n\treturn w, nil\n}\n\nfunc (w *Writer) runFlushToDisk(ctx context.Context, flushIntervalInMs int64) {\n\tticker := time.NewTicker(time.Duration(flushIntervalInMs) * 
time.Millisecond)\n\tdefer ticker.Stop()\n\n\tfor {\n\t\tif !w.IsRunning() {\n\t\t\treturn\n\t\t}\n\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\terr := w.Close()\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\"runFlushToDisk close fail\",\n\t\t\t\t\tzap.String(\"namespace\", w.cfg.ChangeFeedID.Namespace),\n\t\t\t\t\tzap.String(\"changefeed\", w.cfg.ChangeFeedID.ID),\n\t\t\t\t\tzap.Error(err))\n\t\t\t}\n\t\tcase <-ticker.C:\n\t\t\terr := w.Flush()\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\"redo log flush fail\",\n\t\t\t\t\tzap.String(\"namespace\", w.cfg.ChangeFeedID.Namespace),\n\t\t\t\t\tzap.String(\"changefeed\", w.cfg.ChangeFeedID.ID), zap.Error(err))\n\t\t\t}\n\t\t}\n\t}\n}\n\n\/\/ Write implement write interface\n\/\/ TODO: more general api with fileName generated by caller\nfunc (w *Writer) Write(rawData []byte) (int, error) {\n\tw.Lock()\n\tdefer w.Unlock()\n\n\twriteLen := int64(len(rawData))\n\tif writeLen > w.cfg.MaxLogSize {\n\t\treturn 0, cerror.ErrFileSizeExceed.GenWithStackByArgs(writeLen, w.cfg.MaxLogSize)\n\t}\n\n\tif w.file == nil {\n\t\tif err := w.openOrNew(len(rawData)); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\n\tif w.size+writeLen > w.cfg.MaxLogSize {\n\t\tif err := w.rotate(); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\tif w.maxCommitTS.Load() < w.eventCommitTS.Load() {\n\t\tw.maxCommitTS.Store(w.eventCommitTS.Load())\n\t}\n\t\/\/ ref: https:\/\/github.com\/etcd-io\/etcd\/pull\/5250\n\tlenField, padBytes := encodeFrameSize(len(rawData))\n\tif err := w.writeUint64(lenField, w.uint64buf); err != nil {\n\t\treturn 0, err\n\t}\n\n\tif padBytes != 0 {\n\t\trawData = append(rawData, make([]byte, padBytes)...)\n\t}\n\n\tn, err := w.bw.Write(rawData)\n\tw.metricWriteBytes.Add(float64(n))\n\tw.size += int64(n)\n\treturn n, err\n}\n\n\/\/ AdvanceTs implement Advance interface\nfunc (w *Writer) AdvanceTs(commitTs uint64) {\n\tw.eventCommitTS.Store(commitTs)\n}\n\nfunc (w *Writer) writeUint64(n uint64, buf []byte) error {\n\tbinary.LittleEndian.PutUint64(buf, n)\n\tv, err := w.bw.Write(buf)\n\tw.metricWriteBytes.Add(float64(v))\n\n\treturn err\n}\n\n\/\/ the func uses code from etcd wal\/encoder.go\n\/\/ ref: https:\/\/github.com\/etcd-io\/etcd\/pull\/5250\nfunc encodeFrameSize(dataBytes int) (lenField uint64, padBytes int) {\n\tlenField = uint64(dataBytes)\n\t\/\/ force 8 byte alignment so length never gets a torn write\n\tpadBytes = (8 - (dataBytes % 8)) % 8\n\tif padBytes != 0 {\n\t\tlenField |= uint64(0x80|padBytes) << 56\n\t}\n\treturn lenField, padBytes\n}\n\n\/\/ Close implements fileWriter.Close.\nfunc (w *Writer) Close() error {\n\tw.Lock()\n\tdefer w.Unlock()\n\t\/\/ always set to false when closed, since if having err may not get fixed just by retry\n\tdefer w.running.Store(false)\n\n\tif !w.IsRunning() {\n\t\treturn nil\n\t}\n\n\tredoFlushAllDurationHistogram.\n\t\tDeleteLabelValues(w.cfg.ChangeFeedID.Namespace, w.cfg.ChangeFeedID.ID)\n\tredoFsyncDurationHistogram.\n\t\tDeleteLabelValues(w.cfg.ChangeFeedID.Namespace, w.cfg.ChangeFeedID.ID)\n\tredoWriteBytesGauge.\n\t\tDeleteLabelValues(w.cfg.ChangeFeedID.Namespace, w.cfg.ChangeFeedID.ID)\n\n\treturn w.close()\n}\n\n\/\/ IsRunning implement IsRunning interface\nfunc (w *Writer) IsRunning() bool {\n\treturn w.running.Load()\n}\n\nfunc (w *Writer) isGCRunning() bool {\n\treturn w.gcRunning.Load()\n}\n\nfunc (w *Writer) close() error {\n\tif w.file == nil {\n\t\treturn nil\n\t}\n\terr := w.flushAll()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t\/\/ rename the file name from commitTs.log.tmp to maxCommitTS.log 
if closed safely\n\t\/\/ after rename, the file name could be used for search, since the ts is the max ts for all events in the file.\n\tw.commitTS.Store(w.maxCommitTS.Load())\n\terr = os.Rename(w.file.Name(), w.filePath())\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n\t}\n\n\tif w.cfg.S3Storage {\n\t\tctx, cancel := context.WithTimeout(context.Background(), defaultS3Timeout)\n\t\tdefer cancel()\n\n\t\terr = w.renameInS3(ctx, w.file.Name(), w.filePath())\n\t\tif err != nil {\n\t\t\treturn cerror.WrapError(cerror.ErrS3StorageAPI, err)\n\t\t}\n\t}\n\n\terr = w.file.Close()\n\tw.file = nil\n\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n}\n\nfunc (w *Writer) renameInS3(ctx context.Context, oldPath, newPath string) error {\n\terr := w.writeToS3(ctx, newPath)\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrS3StorageAPI, err)\n\t}\n\treturn cerror.WrapError(cerror.ErrS3StorageAPI, w.storage.DeleteFile(ctx, filepath.Base(oldPath)))\n}\n\nfunc (w *Writer) getLogFileName() string {\n\tif w.op != nil && w.op.getLogFileName != nil {\n\t\treturn w.op.getLogFileName()\n\t}\n\tif model.DefaultNamespace == w.cfg.ChangeFeedID.Namespace {\n\t\treturn fmt.Sprintf(\"%s_%s_%d_%s_%d%s\", w.cfg.CaptureID,\n\t\t\tw.cfg.ChangeFeedID.ID,\n\t\t\tw.cfg.CreateTime.Unix(), w.cfg.FileType, w.commitTS.Load(), common.LogEXT)\n\t}\n\treturn fmt.Sprintf(\"%s_%s_%s_%d_%s_%d%s\", w.cfg.CaptureID,\n\t\tw.cfg.ChangeFeedID.Namespace, w.cfg.ChangeFeedID.ID,\n\t\tw.cfg.CreateTime.Unix(), w.cfg.FileType, w.commitTS.Load(), common.LogEXT)\n}\n\nfunc (w *Writer) filePath() string {\n\treturn filepath.Join(w.cfg.Dir, w.getLogFileName())\n}\n\nfunc openTruncFile(name string) (*os.File, error) {\n\treturn os.OpenFile(name, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, common.DefaultFileMode)\n}\n\nfunc (w *Writer) openNew() error {\n\terr := os.MkdirAll(w.cfg.Dir, common.DefaultDirMode)\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, errors.Annotatef(err, \"can't make dir: %s for new redo logfile\", w.cfg.Dir))\n\t}\n\n\t\/\/ reset ts used in file name when new file\n\tw.commitTS.Store(w.eventCommitTS.Load())\n\tw.maxCommitTS.Store(w.eventCommitTS.Load())\n\tpath := w.filePath() + common.TmpEXT\n\tf, err := openTruncFile(path)\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, errors.Annotate(err, \"can't open new redo logfile\"))\n\t}\n\tw.file = f\n\tw.size = 0\n\terr = w.newPageWriter()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (w *Writer) openOrNew(writeLen int) error {\n\tpath := w.filePath()\n\tinfo, err := os.Stat(path)\n\tif os.IsNotExist(err) {\n\t\treturn w.openNew()\n\t}\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, errors.Annotate(err, \"error getting log file info\"))\n\t}\n\n\tif info.Size()+int64(writeLen) >= w.cfg.MaxLogSize {\n\t\treturn w.rotate()\n\t}\n\n\tfile, err := os.OpenFile(path, os.O_APPEND|os.O_WRONLY, common.DefaultFileMode)\n\tif err != nil {\n\t\t\/\/ return err let the caller decide next move\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n\t}\n\n\tw.file = file\n\tw.size = info.Size()\n\terr = w.newPageWriter()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (w *Writer) newPageWriter() error {\n\toffset, err := w.file.Seek(0, io.SeekCurrent)\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n\t}\n\tw.bw = pioutil.NewPageWriter(w.file, pageBytes, int(offset))\n\n\treturn nil\n}\n\nfunc (w *Writer) rotate() error {\n\tif err := 
w.close(); err != nil {\n\t\treturn err\n\t}\n\treturn w.openNew()\n}\n\n\/\/ GC implement GC interface\nfunc (w *Writer) GC(checkPointTs uint64) error {\n\tif !w.IsRunning() || w.isGCRunning() {\n\t\treturn nil\n\t}\n\n\tw.gcRunning.Store(true)\n\tdefer w.gcRunning.Store(false)\n\n\tremove, err := w.getShouldRemovedFiles(checkPointTs)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar errs error\n\tfor _, f := range remove {\n\t\terr := os.Remove(filepath.Join(w.cfg.Dir, f.Name()))\n\t\terrs = multierr.Append(errs, err)\n\t}\n\n\tif errs != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, errs)\n\t}\n\n\tif w.cfg.S3Storage {\n\t\t\/\/ since if fail delete in s3, do not block any path, so just log the error if any\n\t\tgo func() {\n\t\t\tvar errs error\n\t\t\tfor _, f := range remove {\n\t\t\t\terr := w.storage.DeleteFile(context.Background(), f.Name())\n\t\t\t\terrs = multierr.Append(errs, err)\n\t\t\t}\n\t\t\tif errs != nil {\n\t\t\t\terrs = cerror.WrapError(cerror.ErrS3StorageAPI, errs)\n\t\t\t\tlog.Warn(\"delete redo log in s3 fail\", zap.Error(errs))\n\t\t\t}\n\t\t}()\n\t}\n\n\treturn nil\n}\n\n\/\/ shouldRemoved remove the file which commitTs in file name (max commitTs of all event ts in the file) < checkPointTs,\n\/\/ since all event ts < checkPointTs already sent to sink, the log is not needed any more for recovery\nfunc (w *Writer) shouldRemoved(checkPointTs uint64, f os.FileInfo) (bool, error) {\n\tif filepath.Ext(f.Name()) != common.LogEXT {\n\t\treturn false, nil\n\t}\n\n\tcommitTs, fileType, err := common.ParseLogFileName(f.Name())\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn commitTs < checkPointTs && fileType == w.cfg.FileType, nil\n}\n\nfunc (w *Writer) getShouldRemovedFiles(checkPointTs uint64) ([]os.FileInfo, error) {\n\tfiles, err := ioutil.ReadDir(w.cfg.Dir)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tlog.Warn(\"check removed log dir fail\", zap.Error(err))\n\t\t\treturn []os.FileInfo{}, nil\n\t\t}\n\t\treturn nil, cerror.WrapError(cerror.ErrRedoFileOp, errors.Annotatef(err, \"can't read log file directory: %s\", w.cfg.Dir))\n\t}\n\n\tlogFiles := []os.FileInfo{}\n\tfor _, f := range files {\n\t\tret, err := w.shouldRemoved(checkPointTs, f)\n\t\tif err != nil {\n\t\t\tlog.Warn(\"check removed log file fail\",\n\t\t\t\tzap.String(\"logFile\", f.Name()),\n\t\t\t\tzap.Error(err))\n\t\t\tcontinue\n\t\t}\n\n\t\tif ret {\n\t\t\tlogFiles = append(logFiles, f)\n\t\t}\n\t}\n\n\treturn logFiles, nil\n}\n\nfunc (w *Writer) flushAll() error {\n\tif w.file == nil {\n\t\treturn nil\n\t}\n\n\tstart := time.Now()\n\terr := w.flush()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !w.cfg.S3Storage {\n\t\treturn nil\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), defaultS3Timeout)\n\tdefer cancel()\n\n\terr = w.writeToS3(ctx, w.file.Name())\n\tw.metricFlushAllDuration.Observe(time.Since(start).Seconds())\n\n\treturn err\n}\n\n\/\/ Flush implement Flush interface\nfunc (w *Writer) Flush() error {\n\tw.Lock()\n\tdefer w.Unlock()\n\n\treturn w.flushAll()\n}\n\nfunc (w *Writer) flush() error {\n\tif w.file == nil {\n\t\treturn nil\n\t}\n\n\tn, err := w.bw.FlushN()\n\tw.metricWriteBytes.Add(float64(n))\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n\t}\n\n\tstart := time.Now()\n\terr = w.file.Sync()\n\tw.metricFsyncDuration.Observe(time.Since(start).Seconds())\n\n\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n}\n\nfunc (w *Writer) writeToS3(ctx context.Context, name string) error {\n\tfileData, err := 
os.ReadFile(name)\n\tif err != nil {\n\t\treturn cerror.WrapError(cerror.ErrRedoFileOp, err)\n\t}\n\n\t\/\/ Key in s3: aws.String(rs.options.Prefix + name), prefix should be changefeed name\n\treturn cerror.WrapError(cerror.ErrS3StorageAPI, w.storage.WriteFile(ctx, filepath.Base(name), fileData))\n}\n","avg_line_length":25.6880415945,"max_line_length":124,"alphanum_fraction":0.7002428822} +{"size":1049,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\n * Copyright \u00a9 2017-2018 Aeneas Rekkas \n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * @author Aeneas Rekkas \n * @copyright 2017-2018 Aeneas Rekkas \n * @license \t Apache-2.0\n *\/\n\npackage rsakey\n\nimport (\n\t\"testing\"\n\n\t\"github.com\/stretchr\/testify\/assert\"\n)\n\nfunc TestLocalHS256Manager(t *testing.T) {\n\tkey := []byte(\"somkey\")\n\tm1 := NewLocalHS256Manager(key)\n\tm2 := NewLocalHS256Manager(key)\n\n\tassert.EqualValues(t, m1.PublicKeyID(), m2.PublicKeyID())\n}\n","avg_line_length":29.1388888889,"max_line_length":75,"alphanum_fraction":0.7197330791} +{"size":40563,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright 2020 Coinbase, Inc.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage parser\n\nimport (\n\t\"math\/big\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com\/HelloKashif\/rosetta-sdk-go\/types\"\n\t\"github.com\/stretchr\/testify\/assert\"\n)\n\nfunc TestMatchOperations(t *testing.T) {\n\tvar tests = map[string]struct {\n\t\toperations []*types.Operation\n\t\tdescriptions *Descriptions\n\n\t\tmatches []*Match\n\t\terr bool\n\t}{\n\t\t\"simple transfer (with extra op)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: 
NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(-100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (with too many opposite amounts)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1, 2}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with missing account error)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tEqualAddresses: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: false,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (check type)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: 
&types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"output\",\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"input\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tType: \"input\",\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tType: \"output\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"input\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(-100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"output\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (reject extra op)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tErrUnmatched: true,\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with unequal amounts)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: 
&Descriptions{\n\t\t\t\tEqualAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with equal amounts)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tEqualAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (with coin action)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tCoinChange: &types.CoinChange{\n\t\t\t\t\t\tCoinAction: types.CoinSpent,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: 
&AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tCoinAction: types.CoinSpent,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(-100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tCoinChange: &types.CoinChange{\n\t\t\t\t\t\t\t\tCoinAction: types.CoinSpent,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (missing coin action)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tCoinAction: 
types.CoinSpent,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (incorrect coin action)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tCoinChange: &types.CoinChange{\n\t\t\t\t\t\tCoinAction: types.CoinCreated,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tCoinAction: types.CoinSpent,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with currency)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: 
[]*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(-100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (with missing currency)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"ETH\",\n\t\t\t\t\t\t\t\tDecimals: 18,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t\tCurrency: &types.Currency{\n\t\t\t\t\t\t\t\tSymbol: \"BTC\",\n\t\t\t\t\t\t\t\tDecimals: 8,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with sender metadata) and non-equal addresses\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub\",\n\t\t\t\t\t\t\tMetadata: map[string]interface{}{\n\t\t\t\t\t\t\t\t\"validator\": \"10\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 
1}},\n\t\t\t\tEqualAddresses: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub\",\n\t\t\t\t\t\t\tSubAccountMetadataKeys: []*MetadataDescription{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tKey: \"validator\",\n\t\t\t\t\t\t\t\t\tValueKind: reflect.String,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple transfer (with sender metadata)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub\",\n\t\t\t\t\t\t\tMetadata: map[string]interface{}{\n\t\t\t\t\t\t\t\t\"validator\": \"10\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tEqualAddresses: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub\",\n\t\t\t\t\t\t\tSubAccountMetadataKeys: []*MetadataDescription{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tKey: \"validator\",\n\t\t\t\t\t\t\t\t\tValueKind: reflect.String,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub\",\n\t\t\t\t\t\t\t\t\tMetadata: map[string]interface{}{\n\t\t\t\t\t\t\t\t\t\t\"validator\": \"10\",\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(-100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: 
\"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple transfer (with missing sender address metadata)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub\",\n\t\t\t\t\t\t\tSubAccountMetadataKeys: []*MetadataDescription{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tKey: \"validator\",\n\t\t\t\t\t\t\t\t\tValueKind: reflect.String,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"nil amount ops\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 1\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t}, \/\/ allowed because no amount requirement provided\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 1\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{big.NewInt(100)},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t\tSubAccount: 
&types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 1\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"nil amount ops (force false amount)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 1\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: false,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 1\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"nil amount ops (only require metadata keys)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 1\",\n\t\t\t\t\t\t\tMetadata: map[string]interface{}{\n\t\t\t\t\t\t\t\t\"validator\": -1000,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountMetadataKeys: []*MetadataDescription{\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tKey: \"validator\",\n\t\t\t\t\t\t\t\t\tValueKind: reflect.Int,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 1\",\n\t\t\t\t\t\t\t\t\tMetadata: map[string]interface{}{\n\t\t\t\t\t\t\t\t\t\t\"validator\": 
-1000,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"nil amount ops (sub account address mismatch)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 3\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSubAccountExists: true,\n\t\t\t\t\t\t\tSubAccountAddress: \"sub 1\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"nil descriptions\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 3\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"2 empty descriptions\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 3\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{},\n\t\t\t\t\t{},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 3\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t\tSubAccount: &types.SubAccountIdentifier{\n\t\t\t\t\t\t\t\t\tAddress: \"sub 2\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"empty operations\": {\n\t\t\toperations: []*types.Operation{},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: 
[]*OperationDescription{\n\t\t\t\t\t{},\n\t\t\t\t\t{},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple repeated op\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\t\tbig.NewInt(200),\n\t\t\t\t\t\tbig.NewInt(100),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"simple repeated op (no extra ops allowed)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tErrUnmatched: true,\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple repeated op (with invalid comparison indexes)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: 
true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"simple repeated op (with overlapping, repeated descriptions)\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t\t{ \/\/ will never be possible to meet this description\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"complex repeated op\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"output\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr3\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"output\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-200\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"input\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr4\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-200\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"input\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr5\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"-1000\",\n\t\t\t\t\t},\n\t\t\t\t\tType: \"runoff\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t\tType: \"output\",\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t\tType: \"input\",\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: 
&AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"output\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr3\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"output\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\t\tbig.NewInt(200),\n\t\t\t\t\t\tbig.NewInt(200),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"input\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr4\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"input\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\t\tbig.NewInt(-200),\n\t\t\t\t\t\tbig.NewInt(-200),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr5\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"-1000\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tType: \"runoff\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\t\tbig.NewInt(-1000),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"optional description not met\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: []*Match{\n\t\t\t\t{\n\t\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: 
\"200\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\t\tbig.NewInt(200),\n\t\t\t\t\t\tbig.NewInt(100),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tnil,\n\t\t\t},\n\t\t\terr: false,\n\t\t},\n\t\t\"optional description equal amounts not found\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tEqualAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t\t\"optional description opposite amounts not found\": {\n\t\t\toperations: []*types.Operation{\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr2\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"200\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{}, \/\/ extra op ignored\n\t\t\t\t{\n\t\t\t\t\tAccount: &types.AccountIdentifier{\n\t\t\t\t\t\tAddress: \"addr1\",\n\t\t\t\t\t},\n\t\t\t\t\tAmount: &types.Amount{\n\t\t\t\t\t\tValue: \"100\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tdescriptions: &Descriptions{\n\t\t\t\tOppositeAmounts: [][]int{{0, 1}},\n\t\t\t\tOperationDescriptions: []*OperationDescription{\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: PositiveAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAllowRepeats: true,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tAccount: &AccountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tAmount: &AmountDescription{\n\t\t\t\t\t\t\tExists: true,\n\t\t\t\t\t\t\tSign: NegativeAmountSign,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tmatches: nil,\n\t\t\terr: true,\n\t\t},\n\t}\n\n\tfor name, test := range tests {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\tmatches, err := MatchOperations(test.descriptions, test.operations)\n\t\t\tif test.err {\n\t\t\t\tassert.Error(t, err)\n\t\t\t} else {\n\t\t\t\tassert.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, test.matches, matches)\n\t\t})\n\t}\n}\n\nfunc TestMatch(t *testing.T) {\n\tvar tests = map[string]struct {\n\t\tm *Match\n\n\t\top *types.Operation\n\t\tamount *big.Int\n\t}{\n\t\t\"nil match\": {},\n\t\t\"empty match\": {\n\t\t\tm: 
&Match{},\n\t\t},\n\t\t\"single op match\": {\n\t\t\tm: &Match{\n\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t{\n\t\t\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\t\t\tIndex: 1,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\tbig.NewInt(100),\n\t\t\t\t},\n\t\t\t},\n\t\t\top: &types.Operation{\n\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\tIndex: 1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tamount: big.NewInt(100),\n\t\t},\n\t\t\"multi-op match\": {\n\t\t\tm: &Match{\n\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t{\n\t\t\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\t\t\tIndex: 1,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\t\t\tIndex: 2,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAmounts: []*big.Int{\n\t\t\t\t\tbig.NewInt(100),\n\t\t\t\t\tbig.NewInt(200),\n\t\t\t\t},\n\t\t\t},\n\t\t\top: &types.Operation{\n\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\tIndex: 1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tamount: big.NewInt(100),\n\t\t},\n\t\t\"single op match with nil amount\": {\n\t\t\tm: &Match{\n\t\t\t\tOperations: []*types.Operation{\n\t\t\t\t\t{\n\t\t\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\t\t\tIndex: 1,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAmounts: []*big.Int{nil},\n\t\t\t},\n\t\t\top: &types.Operation{\n\t\t\t\tOperationIdentifier: &types.OperationIdentifier{\n\t\t\t\t\tIndex: 1,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tfor name, test := range tests {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\top, amount := test.m.First()\n\t\t\tassert.Equal(t, test.op, op)\n\t\t\tassert.Equal(t, test.amount, amount)\n\t\t})\n\t}\n}\n","avg_line_length":20.6532586558,"max_line_length":75,"alphanum_fraction":0.509627} +{"size":3664,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/\/ OpenRDAP\n\/\/ Copyright 2017 Tom Harwood\n\/\/ MIT License, see the LICENSE file.\n\npackage rdap\n\nimport (\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com\/openrdap\/rdap\/test\"\n)\n\nfunc TestVCardErrors(t *testing.T) {\n\tfilenames := []string{\n\t\t\"jcard\/error_invalid_json.json\",\n\t\t\"jcard\/error_bad_top_type.json\",\n\t\t\"jcard\/error_bad_vcard_label.json\",\n\t\t\"jcard\/error_bad_properties_array.json\",\n\t\t\"jcard\/error_bad_property_size.json\",\n\t\t\"jcard\/error_bad_property_name.json\",\n\t\t\"jcard\/error_bad_property_type.json\",\n\t\t\"jcard\/error_bad_property_parameters.json\",\n\t\t\"jcard\/error_bad_property_parameters_2.json\",\n\t\t\"jcard\/error_bad_property_nest_depth.json\",\n\t}\n\n\tfor _, filename := range filenames {\n\t\tj, err := NewVCard(test.LoadFile(filename))\n\n\t\tif j != nil || err == nil {\n\t\t\tt.Errorf(\"jCard with error unexpectedly parsed %s %v %s\\n\", filename, j, err)\n\t\t}\n\t}\n}\n\nfunc TestVCardExample(t *testing.T) {\n\tj, err := NewVCard(test.LoadFile(\"jcard\/example.json\"))\n\tif j == nil || err != nil {\n\t\tt.Errorf(\"jCard parse failed %v %s\\n\", j, err)\n\t}\n\n\tnumProperties := 17\n\tif len(j.Properties) != numProperties {\n\t\tt.Errorf(\"Got %d properties expected %d\", len(j.Properties), numProperties)\n\t}\n\n\texpectedVersion := &VCardProperty{\n\t\tName: \"version\",\n\t\tParameters: make(map[string][]string),\n\t\tType: \"text\",\n\t\tValue: \"4.0\",\n\t}\n\n\tif !reflect.DeepEqual(j.Get(\"version\")[0], expectedVersion) {\n\t\tt.Errorf(\"version field incorrect\")\n\t}\n\n\texpectedN := 
&VCardProperty{\n\t\tName: \"n\",\n\t\tParameters: make(map[string][]string),\n\t\tType: \"text\",\n\t\tValue: []interface{}{\"Perreault\", \"Simon\", \"\", \"\", []interface{}{\"ing. jr\", \"M.Sc.\"}},\n\t}\n\n\texpectedFlatN := []string{\n\t\t\"Perreault\",\n\t\t\"Simon\",\n\t\t\"\",\n\t\t\"\",\n\t\t\"ing. jr\",\n\t\t\"M.Sc.\",\n\t}\n\n\tif !reflect.DeepEqual(j.Get(\"n\")[0], expectedN) {\n\t\tt.Errorf(\"n field incorrect\")\n\t}\n\n\tif !reflect.DeepEqual(j.Get(\"n\")[0].Values(), expectedFlatN) {\n\t\tt.Errorf(\"n flat value incorrect\")\n\t}\n\n\texpectedTel0 := &VCardProperty{\n\t\tName: \"tel\",\n\t\tParameters: map[string][]string{\"type\": []string{\"work\", \"voice\"}, \"pref\": []string{\"1\"}},\n\t\tType: \"uri\",\n\t\tValue: \"tel:+1-418-656-9254;ext=102\",\n\t}\n\n\tif !reflect.DeepEqual(j.Get(\"tel\")[0], expectedTel0) {\n\t\tt.Errorf(\"tel[0] field incorrect\")\n\t}\n}\n\nfunc TestVCardMixedDatatypes(t *testing.T) {\n\tj, err := NewVCard(test.LoadFile(\"jcard\/mixed.json\"))\n\tif j == nil || err != nil {\n\t\tt.Errorf(\"jCard parse failed %v %s\\n\", j, err)\n\t}\n\n\texpectedMixed := &VCardProperty{\n\t\tName: \"mixed\",\n\t\tParameters: make(map[string][]string),\n\t\tType: \"text\",\n\t\tValue: []interface{}{\"abc\", true, float64(42), nil, []interface{}{\"def\", false, float64(43)}},\n\t}\n\n\texpectedFlatMixed := []string{\n\t\t\"abc\",\n\t\t\"true\",\n\t\t\"42\",\n\t\t\"\",\n\t\t\"def\",\n\t\t\"false\",\n\t\t\"43\",\n\t}\n\n\tif !reflect.DeepEqual(j.Get(\"mixed\")[0], expectedMixed) {\n\t\tt.Errorf(\"mixed field incorrect\")\n\t}\n\n\tflattened := j.Get(\"mixed\")[0].Values()\n\tif !reflect.DeepEqual(flattened, expectedFlatMixed) {\n\t\tt.Errorf(\"mixed flat value incorrect %v\", flattened)\n\t}\n}\n\nfunc TestVCardQuickAccessors(t *testing.T) {\n\tj, err := NewVCard(test.LoadFile(\"jcard\/example.json\"))\n\tif j == nil || err != nil {\n\t\tt.Errorf(\"jCard parse failed %v %s\\n\", j, err)\n\t}\n\n\tgot := []string{\n\t\tj.Name(),\n\t\tj.POBox(),\n\t\tj.ExtendedAddress(),\n\t\tj.StreetAddress(),\n\t\tj.Locality(),\n\t\tj.Region(),\n\t\tj.PostalCode(),\n\t\tj.Country(),\n\t\tj.Tel(),\n\t\tj.Fax(),\n\t\tj.Email(),\n\t}\n\n\texpected := []string{\n\t\t\"Simon Perreault\",\n\t\t\"\",\n\t\t\"Suite D2-630\",\n\t\t\"2875 Laurier\",\n\t\t\"Quebec\",\n\t\t\"QC\",\n\t\t\"G1V 2M2\",\n\t\t\"Canada\",\n\t\t\"+1-418-656-9254;ext=102\",\n\t\t\"\",\n\t\t\"simon.perreault@viagenie.ca\",\n\t}\n\n\tif !reflect.DeepEqual(got, expected) {\n\t\tt.Errorf(\"Got %v expected %v\\n\", got, expected)\n\t}\n}\n","avg_line_length":22.0722891566,"max_line_length":101,"alphanum_fraction":0.625} +{"size":1519,"ext":"go","lang":"Go","max_stars_count":6.0,"content":"package common\n\nimport (\n\t\"bufio\"\n\t\"io\"\n\t\"os\"\n\t\"strings\"\n)\n\nconst (\n\tsep = \".\"\n)\n\ntype Config struct {\n\tConfMap map[string]string\n\tsection string\n}\n\nfunc (c *Config) InitConfig(path string) error {\n\tc.ConfMap = make(map[string]string)\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\tr := bufio.NewReader(f)\n\tfor {\n\t\tb, _, err := r.ReadLine()\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t\ts := strings.TrimSpace(string(b))\n\t\tif strings.Index(s, \"#\") == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tn1 := strings.Index(s, \"[\")\n\t\tn2 := strings.LastIndex(s, \"]\")\n\t\tif n1 > -1 && n2 > -1 && n2 > n1+1 {\n\t\t\tc.section = strings.TrimSpace(s[n1+1 : n2])\n\t\t\tcontinue\n\t\t}\n\n\t\teqIndex := strings.Index(s, 
\"=\")\n\t\tif eqIndex < 0 {\n\t\t\tcontinue\n\t\t}\n\t\tkey := strings.TrimSpace(s[:eqIndex])\n\t\tif len(key) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tval := strings.TrimSpace(s[eqIndex+1:])\n\n\t\tvalPos := strings.Index(val, \"\\t#\")\n\t\tif valPos > -1 {\n\t\t\tval = val[0:valPos]\n\t\t}\n\n\t\tvalPos = strings.Index(val, \" #\")\n\t\tif valPos > -1 {\n\t\t\tval = val[0:valPos]\n\t\t}\n\n\t\tvalPos = strings.Index(val, \"\\t\/\/\")\n\t\tif valPos > -1 {\n\t\t\tval = val[0:valPos]\n\t\t}\n\n\t\tvalPos = strings.Index(val, \" \/\/\")\n\t\tif valPos > -1 {\n\t\t\tval = val[0:valPos]\n\t\t}\n\n\t\tif len(val) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tsection := \"\"\n\t\tif c.section != \"\" {\n\t\t\tsection += c.section + sep\n\t\t}\n\t\tsection += key\n\t\tc.ConfMap[section] = strings.TrimSpace(val)\n\t}\n\treturn nil\n}\n\nfunc (c Config) Read(key string) string {\n\tv, ok := c.ConfMap[key]\n\tif !ok {\n\t\treturn \"\"\n\t}\n\treturn v\n}\n","avg_line_length":15.6597938144,"max_line_length":48,"alphanum_fraction":0.5576036866} +{"size":6306,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright 2016-2017 Authors of Cilium\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage api\n\nimport (\n\t\"fmt\"\n\t\"net\"\n\t\"strconv\"\n\t\"strings\"\n)\n\nconst (\n\tmaxPorts = 40\n\t\/\/ MaxCIDREntries is used to prevent compile failures at runtime.\n\tMaxCIDREntries = 40\n)\n\n\/\/ Sanitize validates and sanitizes a policy rule. Minor edits such as\n\/\/ capitalization of the protocol name are automatically fixed up. 
More\n\/\/ fundamental violations will cause an error to be returned.\nfunc (r Rule) Sanitize() error {\n\tfor i := range r.Ingress {\n\t\tif err := r.Ingress[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor i := range r.Egress {\n\t\tif err := r.Egress[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}\n\nfunc (i *IngressRule) sanitize() error {\n\tif len(i.FromCIDR) > 0 && len(i.FromEndpoints) > 0 {\n\t\treturn fmt.Errorf(\"Combining FromCIDR and FromEndpoints is not supported yet\")\n\t}\n\n\tif len(i.FromCIDR) > 0 && len(i.ToPorts) > 0 {\n\t\treturn fmt.Errorf(\"Combining ToPorts and FromCIDR is not supported yet\")\n\t}\n\n\tfor n := range i.ToPorts {\n\t\tif err := i.ToPorts[n].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif l := len(i.FromCIDR); l > MaxCIDREntries {\n\t\treturn fmt.Errorf(\"too many ingress L3 entries %d\/%d\", l, MaxCIDREntries)\n\t}\n\n\tfor n := range i.FromCIDR {\n\t\tif err := i.FromCIDR[n].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor n := range i.FromCIDRSet {\n\t\tif err := i.FromCIDRSet[n].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}\n\nfunc (e *EgressRule) sanitize() error {\n\tif len(e.ToCIDR) > 0 && len(e.ToPorts) > 0 {\n\t\treturn fmt.Errorf(\"Combining ToPorts and ToCIDR is not supported yet\")\n\t}\n\n\tfor i := range e.ToPorts {\n\t\tif err := e.ToPorts[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif l := len(e.ToCIDR); l > MaxCIDREntries {\n\t\treturn fmt.Errorf(\"too many egress L3 entries %d\/%d\", l, MaxCIDREntries)\n\t}\n\tfor i := range e.ToCIDR {\n\t\tif err := e.ToCIDR[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor i := range e.ToCIDRSet {\n\t\tif err := e.ToCIDRSet[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ Sanitize sanitizes Kafka rules\n\/\/ TODO we need to add support to check\n\/\/ wildcard and prefix\/suffix later on.\nfunc (kr *PortRuleKafka) Sanitize() error {\n\tif len(kr.APIKey) > 0 {\n\t\tn, ok := KafkaAPIKeyMap[strings.ToLower(kr.APIKey)]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"invalid Kafka APIKey :%q\", kr.APIKey)\n\t\t}\n\t\tkr.apiKeyInt = &n\n\t}\n\n\tif len(kr.APIVersion) > 0 {\n\t\tn, err := strconv.ParseInt(kr.APIVersion, 10, 16)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"invalid Kafka APIVersion :%q\",\n\t\t\t\tkr.APIVersion)\n\t\t}\n\t\tn16 := int16(n)\n\t\tkr.apiVersionInt = &n16\n\t}\n\n\tif len(kr.Topic) > 0 {\n\t\tif len(kr.Topic) > KafkaMaxTopicLen {\n\t\t\treturn fmt.Errorf(\"kafka topic exceeds maximum len of %d\",\n\t\t\t\tKafkaMaxTopicLen)\n\t\t}\n\t\t\/\/ This check allows suffix and prefix matching\n\t\t\/\/ for topic.\n\t\tif KafkaTopicValidChar.MatchString(kr.Topic) == false {\n\t\t\treturn fmt.Errorf(\"invalid Kafka Topic name \\\"%s\\\"\", kr.Topic)\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (pr *L7Rules) sanitize() error {\n\tif (pr.HTTP != nil) && (pr.Kafka != nil) {\n\t\treturn fmt.Errorf(\"multiple L7 protocol rule types specified in single rule\")\n\t}\n\n\tif pr.Kafka != nil {\n\t\tfor i := range pr.Kafka {\n\t\t\tif err := pr.Kafka[i].Sanitize(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (pr *PortRule) sanitize() error {\n\tif len(pr.Ports) > maxPorts {\n\t\treturn fmt.Errorf(\"too many ports, the max is %d\", maxPorts)\n\t}\n\tfor i := range pr.Ports {\n\t\tif err := pr.Ports[i].sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t\/\/ Sanitize L7 rules\n\tif pr.Rules != nil 
{\n\t\tif err := pr.Rules.sanitize(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (pp *PortProtocol) sanitize() error {\n\tif pp.Port == \"\" {\n\t\treturn fmt.Errorf(\"Port must be specified\")\n\t}\n\n\tp, err := strconv.ParseUint(pp.Port, 0, 16)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Unable to parse port: %s\", err)\n\t}\n\n\tif p == 0 {\n\t\treturn fmt.Errorf(\"Port cannot be 0\")\n\t}\n\n\tpp.Protocol, err = ParseL4Proto(string(pp.Protocol))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\nfunc (cidr CIDR) sanitize() error {\n\tstrCIDR := string(cidr)\n\tif strCIDR == \"\" {\n\t\treturn fmt.Errorf(\"IP must be specified\")\n\t}\n\n\t_, ipnet, err := net.ParseCIDR(strCIDR)\n\tif err == nil {\n\t\t\/\/ Returns the prefix length as zero if the mask is not continuous.\n\t\tones, _ := ipnet.Mask.Size()\n\t\tif ones == 0 {\n\t\t\treturn fmt.Errorf(\"Mask length can not be zero\")\n\t\t}\n\t} else {\n\t\t\/\/ Try to parse as a fully masked IP or an IP subnetwork\n\t\tip := net.ParseIP(strCIDR)\n\t\tif ip == nil {\n\t\t\treturn fmt.Errorf(\"Unable to parse CIDR: %s\", err)\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ sanitize validates a CIDRRule by checking that the CIDR prefix itself is\n\/\/ valid, and ensuring that all of the exception CIDR prefixes are contained\n\/\/ within the allowed CIDR prefix.\nfunc (c *CIDRRule) sanitize() error {\n\n\t\/\/ Only allow notation \/. Note that this differs from\n\t\/\/ the logic in api.CIDR.Sanitize().\n\t_, cidrNet, err := net.ParseCIDR(string(c.Cidr))\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t\/\/ Returns the prefix length as zero if the mask is not continuous.\n\tones, _ := cidrNet.Mask.Size()\n\tif ones == 0 {\n\t\treturn fmt.Errorf(\"Mask length can not be zero\")\n\t}\n\n\t\/\/ Ensure that each provided exception CIDR prefix is formatted correctly,\n\t\/\/ and is contained within the CIDR prefix to\/from which we want to allow\n\t\/\/ traffic.\n\tfor _, p := range c.ExceptCIDRs {\n\t\texceptCIDRAddr, _, err := net.ParseCIDR(string(p))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t\/\/ Note: this also checks that the allow CIDR prefix and the exception\n\t\t\/\/ CIDR prefixes are part of the same address family.\n\t\tif !cidrNet.Contains(exceptCIDRAddr) {\n\t\t\treturn fmt.Errorf(\"allow CIDR prefix %s does not contain \"+\n\t\t\t\t\"exclude CIDR prefix %s\", c.Cidr, p)\n\t\t}\n\t}\n\n\treturn nil\n}\n","avg_line_length":23.7962264151,"max_line_length":80,"alphanum_fraction":0.6569933397} +{"size":1943,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/*\nCopyright 2014 Google Inc. 
All rights reserved.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n\npackage v1beta2\n\nimport (\n\t\"strings\"\n\n\t\"github.com\/GoogleCloudPlatform\/kubernetes\/pkg\/api\"\n\t\"github.com\/GoogleCloudPlatform\/kubernetes\/pkg\/util\"\n)\n\nfunc init() {\n\tapi.Scheme.AddDefaultingFuncs(\n\t\tfunc(obj *Volume) {\n\t\t\tif util.AllPtrFieldsNil(&obj.Source) {\n\t\t\t\tobj.Source = VolumeSource{\n\t\t\t\t\tEmptyDir: &EmptyDir{},\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tfunc(obj *Port) {\n\t\t\tif obj.Protocol == \"\" {\n\t\t\t\tobj.Protocol = ProtocolTCP\n\t\t\t}\n\t\t},\n\t\tfunc(obj *Container) {\n\t\t\tif obj.ImagePullPolicy == \"\" {\n\t\t\t\t\/\/ TODO(dchen1107): Move ParseImageName code to pkg\/util\n\t\t\t\tparts := strings.Split(obj.Image, \":\")\n\t\t\t\t\/\/ Check image tag\n\t\t\t\tif parts[len(parts)-1] == \"latest\" {\n\t\t\t\t\tobj.ImagePullPolicy = PullAlways\n\t\t\t\t} else {\n\t\t\t\t\tobj.ImagePullPolicy = PullIfNotPresent\n\t\t\t\t}\n\t\t\t}\n\t\t\tif obj.TerminationMessagePath == \"\" {\n\t\t\t\tobj.TerminationMessagePath = TerminationMessagePathDefault\n\t\t\t}\n\t\t},\n\t\tfunc(obj *RestartPolicy) {\n\t\t\tif util.AllPtrFieldsNil(obj) {\n\t\t\t\tobj.Always = &RestartPolicyAlways{}\n\t\t\t}\n\t\t},\n\t\tfunc(obj *Service) {\n\t\t\tif obj.Protocol == \"\" {\n\t\t\t\tobj.Protocol = ProtocolTCP\n\t\t\t}\n\t\t\tif obj.SessionAffinity == \"\" {\n\t\t\t\tobj.SessionAffinity = AffinityTypeNone\n\t\t\t}\n\t\t},\n\t\tfunc(obj *PodSpec) {\n\t\t\tif obj.DNSPolicy == \"\" {\n\t\t\t\tobj.DNSPolicy = DNSClusterFirst\n\t\t\t}\n\t\t},\n\t\tfunc(obj *ContainerManifest) {\n\t\t\tif obj.DNSPolicy == \"\" {\n\t\t\t\tobj.DNSPolicy = DNSClusterFirst\n\t\t\t}\n\t\t},\n\t)\n}\n","avg_line_length":24.2875,"max_line_length":72,"alphanum_fraction":0.6762738034} +{"size":2726,"ext":"go","lang":"Go","max_stars_count":87.0,"content":"\/\/ Copyright 2020-2021 Politecnico di Torino\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage bastion_controller\n\nimport (\n\t\"os\"\n\t\"path\/filepath\"\n\t\"testing\"\n\n\t. \"github.com\/onsi\/ginkgo\"\n\t. 
\"github.com\/onsi\/gomega\"\n\t\"k8s.io\/client-go\/kubernetes\/scheme\"\n\t\"k8s.io\/client-go\/rest\"\n\tctrl \"sigs.k8s.io\/controller-runtime\"\n\t\"sigs.k8s.io\/controller-runtime\/pkg\/client\"\n\t\"sigs.k8s.io\/controller-runtime\/pkg\/envtest\"\n\t\"sigs.k8s.io\/controller-runtime\/pkg\/envtest\/printer\"\n\n\tcrownlabsv1alpha1 \"github.com\/netgroup-polito\/CrownLabs\/operators\/api\/v1alpha1\"\n)\n\n\/\/ These tests use Ginkgo (BDD-style Go testing framework). Refer to\n\/\/ http:\/\/onsi.github.io\/ginkgo\/ to learn more about Ginkgo.\n\nvar cfg *rest.Config\nvar k8sClient client.Client\nvar testEnv *envtest.Environment\n\nfunc TestAPIs(t *testing.T) {\n\tRegisterFailHandler(Fail)\n\n\tRunSpecsWithDefaultAndCustomReporters(t,\n\t\t\"Controller Suite\",\n\t\t[]Reporter{printer.NewlineReporter{}})\n}\n\nvar _ = BeforeSuite(func(done Done) {\n\tBy(\"bootstrapping test environment\")\n\ttestEnv = &envtest.Environment{\n\t\tCRDDirectoryPaths: []string{filepath.Join(\"..\", \"..\", \"deploy\", \"crds\")},\n\t}\n\n\tvar err error\n\tcfg, err = testEnv.Start()\n\tExpect(err).ToNot(HaveOccurred())\n\tExpect(cfg).ToNot(BeNil())\n\n\terr = crownlabsv1alpha1.AddToScheme(scheme.Scheme)\n\tExpect(err).NotTo(HaveOccurred())\n\n\t\/\/ +kubebuilder:scaffold:scheme\n\n\tk8sManager, err := ctrl.NewManager(cfg, ctrl.Options{\n\t\tScheme: scheme.Scheme,\n\t\tMetricsBindAddress: \"0\",\n\t})\n\tExpect(err).ToNot(HaveOccurred())\n\n\terr = (&BastionReconciler{\n\t\tClient: k8sManager.GetClient(),\n\t\tScheme: k8sManager.GetScheme(),\n\t\tAuthorizedKeysPath: \".\/authorized_keys_test\",\n\t\tReconcileDeferHook: GinkgoRecover,\n\t}).SetupWithManager(k8sManager)\n\tExpect(err).ToNot(HaveOccurred())\n\n\tgo func() {\n\t\terr = k8sManager.Start(ctrl.SetupSignalHandler())\n\t\tExpect(err).ToNot(HaveOccurred())\n\t}()\n\n\tk8sClient = k8sManager.GetClient()\n\tExpect(k8sClient).ToNot(BeNil())\n\n\tclose(done)\n}, 60)\n\nvar _ = AfterSuite(func() {\n\tBy(\"Deleting the file used to test\")\n\tExpect(os.Remove(\".\/authorized_keys_test\")).Should(Succeed())\n\n\tBy(\"tearing down the test environment\")\n\terr := testEnv.Stop()\n\tExpect(err).ToNot(HaveOccurred())\n})\n","avg_line_length":27.8163265306,"max_line_length":80,"alphanum_fraction":0.726705796} +{"size":218,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package test\n\n\/\/ Square type\ntype Square struct {\n\tLength int\n\tColor string\n}\n\ntype Rectangle struct {\n\tSquare\n\tLong int\n}\n\n\/\/ returns the area of the Square\nfunc (s Square) Area() int {\n\treturn s.Length * s.Length\n}\n","avg_line_length":12.1111111111,"max_line_length":33,"alphanum_fraction":0.6972477064} +{"size":8191,"ext":"go","lang":"Go","max_stars_count":2.0,"content":"\/\/go:generate protoc -I msg --go_out=plugins=grpc:msg msg\/msg.proto\npackage main\n\nimport (\n\t\"fmt\"\n\t\/\/\"regexp\"\n\t\"sort\"\n\t\"strconv\"\n\t\"strings\"\n\n\ttb \"github.com\/go-telegram-bot-api\/telegram-bot-api\"\n\n\tpb \"github.com\/usher2\/u2byckbot\/msg\"\n)\n\nfunc botUpdates(c pb.CheckClient, bot *tb.BotAPI, updatesChan tb.UpdatesChannel) {\n\tfor {\n\t\tselect {\n\t\tcase update := <-updatesChan:\n\t\t\tswitch {\n\t\t\tcase update.Message != nil: \/\/ ignore any non-Message Updates\n\t\t\t\tif update.Message.Text != \"\" {\n\t\t\t\t\tif update.Message.Chat.Type == \"private\" ||\n\t\t\t\t\t\t(update.Message.ReplyToMessage == nil &&\n\t\t\t\t\t\t\tupdate.Message.ForwardFromMessageID == 0) {\n\t\t\t\t\t\tvar uname string\n\t\t\t\t\t\t\/\/ who writing\n\t\t\t\t\t\tif 
update.Message.From != nil {\n\t\t\t\t\t\t\tuname = update.Message.From.UserName\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\/\/ chat\/dialog\n\t\t\t\t\t\tchat := update.Message.Chat\n\t\t\t\t\t\tgo Talks(c, bot, uname, chat, \"\", 0, \"\", update.Message.Text)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\tcase update.CallbackQuery != nil:\n\t\t\t\tvar (\n\t\t\t\t\tuname string\n\t\t\t\t\treq string\n\t\t\t\t)\n\t\t\t\t\/\/ who writing\n\t\t\t\tif update.CallbackQuery.From != nil {\n\t\t\t\t\tuname = update.CallbackQuery.From.UserName\n\t\t\t\t}\n\t\t\t\t\/\/ chat\/dialog\n\t\t\t\tvar chat *tb.Chat\n\t\t\t\tif update.CallbackQuery.Message != nil {\n\t\t\t\t\tchat = update.CallbackQuery.Message.Chat\n\t\t\t\t\ti := strings.IndexByte(update.CallbackQuery.Message.Text, '\\n')\n\t\t\t\t\tif i > 0 {\n\t\t\t\t\t\tswitch {\n\t\t\t\t\t\tcase strings.HasPrefix(update.CallbackQuery.Message.Text[:i], \"\\U0001f525 \") &&\n\t\t\t\t\t\t\tstrings.HasSuffix(update.CallbackQuery.Message.Text[:i], \" \u0437\u0430\u0431\u043b\u043e\u043a\u0438\u0440\u043e\u0432\u0430\u043d\"):\n\t\t\t\t\t\t\treq = strings.TrimSuffix(strings.TrimPrefix(update.CallbackQuery.Message.Text[:i], \"\\U0001f525 \"), \" \u0437\u0430\u0431\u043b\u043e\u043a\u0438\u0440\u043e\u0432\u0430\u043d\")\n\t\t\t\t\t\tcase strings.Contains(update.CallbackQuery.Message.Text[:i], \"\/n_\"):\n\t\t\t\t\t\t\tj1 := strings.Index(update.CallbackQuery.Message.Text[:i], \"\/n_\")\n\t\t\t\t\t\t\tj2 := strings.IndexByte(update.CallbackQuery.Message.Text[j1:i], ' ')\n\t\t\t\t\t\t\tif j2 != -1 {\n\t\t\t\t\t\t\t\treq = update.CallbackQuery.Message.Text[j1 : j1+j2]\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tgo bot.AnswerCallbackQuery(tb.NewCallback(update.CallbackQuery.ID, \"\")) \/\/ for some reason\n\t\t\t\tgo Talks(c, bot, uname, chat, \"\", update.CallbackQuery.Message.MessageID, update.CallbackQuery.Data, req)\n\t\t\tcase update.InlineQuery != nil:\n\t\t\t\tif update.InlineQuery.Query != \"\" {\n\t\t\t\t\tvar uname string\n\t\t\t\t\t\/\/ who writing\n\t\t\t\t\tif update.InlineQuery.From != nil {\n\t\t\t\t\t\tuname = update.InlineQuery.From.UserName\n\t\t\t\t\t}\n\t\t\t\t\tgo Talks(c, bot, uname, nil, update.InlineQuery.ID, 0, \"\", update.InlineQuery.Query)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n\nvar noAdCount int = 0\n\nconst NO_AD_NUMBER = 20\n\nfunc makePagination(offset TPagination, pages []TPagination) tb.InlineKeyboardMarkup {\n\tvar (\n\t\tkeyboard [][]tb.InlineKeyboardButton\n\t\to int\n\t\tpict string\n\t)\n\tsort.Slice(pages, func(i, j int) bool {\n\t\treturn pages[i].Tag < pages[j].Tag\n\t})\n\tfor i, _ := range pages {\n\t\tcurTag := pages[i].Tag\n\t\tif pages[i].Count > PRINT_LIMIT {\n\t\t\trow := tb.NewInlineKeyboardRow()\n\t\t\tif offset.Tag != curTag {\n\t\t\t\to = 0\n\t\t\t} else {\n\t\t\t\to = offset.Count\n\t\t\t}\n\t\t\tslug := strconv.Itoa(o\/PRINT_LIMIT + 1)\n\t\t\tswitch curTag {\n\t\t\tcase OFFSET_DOMAIN:\n\t\t\t\tslug = \"\u0434\u043e\u043c\u0435\u043d\"\n\t\t\tcase OFFSET_URL:\n\t\t\t\tslug = \"URL\"\n\t\t\tcase OFFSET_IP4:\n\t\t\t\tslug = \"IPv4\"\n\t\t\t}\n\t\t\tif pages[i].Count > 2*PRINT_LIMIT {\n\t\t\t\tpict = \"\\u23ea\"\n\t\t\t\tif o == 0 {\n\t\t\t\t\tpict = \"\\U000023f9\"\n\t\t\t\t}\n\t\t\t\trow = append(row,\n\t\t\t\t\ttb.NewInlineKeyboardButtonData(fmt.Sprintf(\"%d %s\", 1, pict),\n\t\t\t\t\t\tfmt.Sprintf(\"%d:%d\", curTag, 0)),\n\t\t\t\t)\n\t\t\t}\n\t\t\t_o := o - PRINT_LIMIT\n\t\t\tif _o < 0 {\n\t\t\t\t_o = 0\n\t\t\t}\n\t\t\tpict = \"\\u23ee\"\n\t\t\tif o == 0 {\n\t\t\t\tpict = \"\\U000023f9\"\n\t\t\t}\n\t\t\trow = 
append(row,\n\t\t\t\ttb.NewInlineKeyboardButtonData(fmt.Sprintf(\"%d %s\", _o\/PRINT_LIMIT+1, pict),\n\t\t\t\t\tfmt.Sprintf(\"%d:%d\", curTag, _o)),\n\t\t\t)\n\t\t\trow = append(row,\n\t\t\t\ttb.NewInlineKeyboardButtonData(fmt.Sprintf(\"\\u2022 %s \\u2022\", slug),\n\t\t\t\t\tfmt.Sprintf(\"%d:%d\", curTag, o)),\n\t\t\t)\n\t\t\t_o = o + PRINT_LIMIT\n\t\t\tif _o > pages[i].Count-(pages[i].Count%PRINT_LIMIT) {\n\t\t\t\t_o = pages[i].Count - (pages[i].Count % PRINT_LIMIT)\n\t\t\t}\n\t\t\tif _o == pages[i].Count {\n\t\t\t\t_o -= PRINT_LIMIT\n\t\t\t}\n\t\t\tpict = \"\\u23ed\"\n\t\t\tif o >= _o {\n\t\t\t\tpict = \"\\U000023f9\"\n\t\t\t}\n\t\t\t_p := _o\/PRINT_LIMIT + 1\n\t\t\trow = append(row,\n\t\t\t\ttb.NewInlineKeyboardButtonData(fmt.Sprintf(\"%s %d\", pict, _p),\n\t\t\t\t\tfmt.Sprintf(\"%d:%d\", curTag, _o)),\n\t\t\t)\n\t\t\tif pages[i].Count > 2*PRINT_LIMIT {\n\t\t\t\t_o = pages[i].Count - (pages[i].Count % PRINT_LIMIT)\n\t\t\t\tif _o == pages[i].Count {\n\t\t\t\t\t_o -= PRINT_LIMIT\n\t\t\t\t}\n\t\t\t\t_p = _o\/PRINT_LIMIT + 1\n\t\t\t\tpict = \"\\u23e9\"\n\t\t\t\tif o >= _o {\n\t\t\t\t\tpict = \"\\U000023f9\"\n\t\t\t\t}\n\t\t\t\trow = append(row,\n\t\t\t\t\ttb.NewInlineKeyboardButtonData(fmt.Sprintf(\"%s %d\", pict, _p),\n\t\t\t\t\t\tfmt.Sprintf(\"%d:%d\", curTag, _o)),\n\t\t\t\t)\n\t\t\t}\n\t\t\tkeyboard = append(keyboard, row)\n\t\t}\n\t}\n\treturn tb.InlineKeyboardMarkup{\n\t\tInlineKeyboard: keyboard,\n\t}\n}\n\nfunc sendMessage(bot *tb.BotAPI, chat *tb.Chat, inlineId string, messageId int, text string, offset TPagination, pages []TPagination) {\n\tif chat != nil {\n\t\tif noAdCount >= NO_AD_NUMBER {\n\t\t\ttext += \"--- \\n\" + DonateFooter\n\t\t\tnoAdCount = 0\n\t\t} else {\n\t\t\t\/\/text += Footer\n\t\t\tnoAdCount++\n\t\t}\n\t\tif messageId > 0 {\n\t\t\tmsg := tb.NewEditMessageText(chat.ID, messageId, text)\n\t\t\tmsg.ParseMode = tb.ModeMarkdown\n\t\t\tmsg.DisableWebPagePreview = true\n\t\t\tinlineKeyboard := makePagination(offset, pages)\n\t\t\tif len(inlineKeyboard.InlineKeyboard) > 0 {\n\t\t\t\tmsg.ReplyMarkup = &inlineKeyboard\n\t\t\t}\n\t\t\t_, err := bot.Send(msg)\n\t\t\tif err != nil {\n\t\t\t\tWarning.Printf(\"Error sending message: %s\\n\", err.Error())\n\t\t\t}\n\t\t} else {\n\t\t\tmsg := tb.NewMessage(chat.ID, text)\n\t\t\tmsg.ParseMode = tb.ModeMarkdown\n\t\t\tmsg.DisableWebPagePreview = true\n\t\t\tinlineKeyboard := makePagination(offset, pages)\n\t\t\tif len(inlineKeyboard.InlineKeyboard) > 0 {\n\t\t\t\tmsg.ReplyMarkup = inlineKeyboard\n\t\t\t}\n\t\t\t_, err := bot.Send(msg)\n\t\t\tif err != nil {\n\t\t\t\tWarning.Printf(\"Error sending message: %s\\n\", err.Error())\n\t\t\t}\n\t\t}\n\t} else if inlineId != \"\" {\n\t\tarticle := tb.InlineQueryResultArticle{\n\t\t\tID: inlineId,\n\t\t\tTitle: \"Search result\",\n\t\t\tType: \"article\",\n\t\t\tInputMessageContent: tb.InputTextMessageContent{\n\t\t\t\tText: text + Footer,\n\t\t\t\tParseMode: tb.ModeMarkdown,\n\t\t\t\tDisableWebPagePreview: true,\n\t\t\t},\n\t\t}\n\t\tinlineConf := tb.InlineConfig{\n\t\t\tInlineQueryID: inlineId,\n\t\t\tResults: []interface{}{article},\n\t\t}\n\t\tif _, err := bot.AnswerInlineQuery(inlineConf); err != nil {\n\t\t\tWarning.Printf(\"Error sending answer: %s\\n\", err.Error())\n\t\t}\n\t}\n}\n\n\/\/ Handle commands\nfunc Talks(c pb.CheckClient, bot *tb.BotAPI, uname string, chat *tb.Chat, inlineId string, messageId int, callbackData, text string) {\n\tvar (\n\t\treply string\n\t\tpages []TPagination\n\t\toffset TPagination\n\t)\n\tif callbackData != \"\" && strings.IndexByte(callbackData, 
':') != -1 {\n\t\ti := strings.IndexByte(callbackData, ':')\n\t\tif i != len(callbackData)-1 {\n\t\t\toffset.Tag, _ = strconv.Atoi(callbackData[:i])\n\t\t\toffset.Count, _ = strconv.Atoi(callbackData[i+1:])\n\t\t}\n\t}\n\t\/\/log.Printf(\"[%s] %d %s\", UserName, ChatID, Text)\n\tif i := strings.IndexByte(text, '\\n'); i != -1 {\n\t\ttext = strings.Trim(text[:i], \" \")\n\t}\n\tswitch {\n\tcase text == \"\":\n\t\treply = \"\\U0001f440 \u041d\u0435\u0447\u0435\u0433\u043e \u0438\u0441\u043a\u0430\u0442\u044c\\n\"\n\tcase text == \"\/help\":\n\t\treply = HelpMessage\n\tcase text == \"\/helpen\":\n\t\treply = HelpMessageEn\n\tcase text == \"\/donate\":\n\t\treply = DonateMessage\n\tcase text == \"\/start\":\n\t\treply = \"\u0412\u0456\u0442\u0430\u044e \u0446\u044f\u0431\u0435, \" + Sanitize(uname) + \"!\\n\"\n\tcase text == \"\/ping\":\n\t\treply = Ping(c)\n\tcase text == \"\/ck\" || text == \"\/check\":\n\t\treply = HelpMessage\n\tcase strings.HasPrefix(text, \"\/ck \") || strings.HasPrefix(text, \"\/check \"):\n\t\treply, pages = mainSearch(c, strings.TrimPrefix(strings.TrimPrefix(text, \"\/ck \"), \"\/check \"), offset)\n\tcase strings.HasPrefix(text, \"\/n_\") || strings.HasPrefix(text, \"#\"):\n\t\targs := \"\"\n\t\tif strings.HasPrefix(text, \"\/n_\") {\n\t\t\targs = strings.TrimPrefix(text, \"\/n_\")\n\t\t} else if strings.HasPrefix(text, \"#\") {\n\t\t\targs = strings.TrimPrefix(text, \"#\")\n\t\t}\n\t\treply, pages = numberSearch(c, args, offset)\n\tcase strings.HasPrefix(text, \"\/\"):\n\t\treply = \"\\U0001f523 iNJALID DEJICE\\n\"\n\tdefault:\n\t\treply, pages = mainSearch(c, text, offset)\n\t}\n\tif reply == \"\" {\n\t\treply = \"\\U0001f463 \u041d\u0438\u0447\u0435\u0433\u043e \u043d\u0435 \u043d\u0430\u0448\u043b\u043e\u0441\u044c\\n\"\n\t}\n\t\/\/Debug.Printf(\"************ %s\", reply)\n\tsendMessage(bot, chat, inlineId, messageId, reply, offset, pages)\n}\n","avg_line_length":28.7403508772,"max_line_length":135,"alphanum_fraction":0.6195824686} +{"size":31272,"ext":"go","lang":"Go","max_stars_count":null,"content":"package eos\n\nimport (\n\t\"encoding\/binary\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"io\/ioutil\"\n\t\"math\"\n\t\"reflect\"\n\t\"strings\"\n\t\"time\"\n\t\"unicode\/utf8\"\n\n\t\"github.com\/marsofsnow\/eos-go\/ecc\"\n\t\"go.uber.org\/zap\"\n)\n\n\/\/ UnmarshalerBinary is the interface implemented by types\n\/\/ that can unmarshal an EOSIO binary description of themselves.\n\/\/\n\/\/ **Warning** This is experimental, exposed only for internal usage for now.\ntype UnmarshalerBinary interface {\n\tUnmarshalBinary(decoder *Decoder) error\n}\n\nvar TypeSize = struct {\n\tBool int\n\tByte int\n\n\tInt8 int\n\tInt16 int\n\n\tUint8 int\n\tUint16 int\n\tUint32 int\n\tUint64 int\n\tUint128 int\n\n\tFloat32 int\n\tFloat64 int\n\n\tChecksum160 int\n\tChecksum256 int\n\tChecksum512 int\n\n\tPublicKey int\n\tSignature int\n\n\tTstamp int\n\tBlockTimestamp int\n\n\tCurrencyName int\n\n\t\/\/ Deprecated Fields\n\n\t\/\/ Deprecated: use Uint8 instead\n\tUInt8 int\n\t\/\/ Deprecated: use Uint16 instead\n\tUInt16 int\n\t\/\/ Deprecated: use Uint32 instead\n\tUInt32 int\n\t\/\/ Deprecated: use Uint64 instead\n\tUInt64 int\n\t\/\/ Deprecated: use Uint128 instead\n\tUInt128 int\n}{\n\tByte: 1,\n\tBool: 1,\n\n\tInt8: 1,\n\tInt16: 2,\n\n\tUint8: 1,\n\tUint16: 2,\n\tUint32: 4,\n\tUint64: 8,\n\tUint128: 16,\n\n\tFloat32: 4,\n\tFloat64: 8,\n\n\tChecksum160: 20,\n\tChecksum256: 32,\n\tChecksum512: 64,\n\n\tPublicKey: 34,\n\tSignature: 66,\n\n\tTstamp: 8,\n\tBlockTimestamp: 
4,\n\n\tCurrencyName: 7,\n}\n\nfunc init() {\n\t\/\/ Deprecated fields initialization\n\tTypeSize.UInt8 = TypeSize.Uint8\n\tTypeSize.UInt16 = TypeSize.Uint16\n\tTypeSize.UInt32 = TypeSize.Uint32\n\tTypeSize.UInt64 = TypeSize.Uint64\n\tTypeSize.UInt128 = TypeSize.Uint128\n}\n\nvar RegisteredActions = map[AccountName]map[ActionName]reflect.Type{}\n\n\/\/ Registers Action objects..\nfunc RegisterAction(accountName AccountName, actionName ActionName, obj interface{}) {\n\t\/\/ TODO: lock or som'th.. unless we never call after boot time..\n\tif RegisteredActions[accountName] == nil {\n\t\tRegisteredActions[accountName] = make(map[ActionName]reflect.Type)\n\t}\n\tRegisteredActions[accountName][actionName] = reflect.TypeOf(obj)\n}\n\n\/\/ Decoder implements the EOS unpacking, similar to FC_BUFFER\ntype Decoder struct {\n\tdata []byte\n\tpos int\n\tdecodeP2PMessage bool\n\tdecodeActions bool\n}\n\nfunc NewDecoder(data []byte) *Decoder {\n\treturn &Decoder{\n\t\tdata: data,\n\t\tdecodeP2PMessage: true,\n\t\tdecodeActions: true,\n\t}\n}\n\nfunc (d *Decoder) DecodeP2PMessage(decode bool) {\n\td.decodeP2PMessage = decode\n}\n\nfunc (d *Decoder) DecodeActions(decode bool) {\n\td.decodeActions = decode\n}\n\ntype DecodeOption = interface{}\n\ntype optionalFieldType bool\n\nconst OptionalField optionalFieldType = true\n\nfunc (d *Decoder) LastPos() int {\n\treturn d.pos\n}\n\nfunc (d *Decoder) Decode(v interface{}, options ...DecodeOption) (err error) {\n\toptionalField := false\n\tfor _, option := range options {\n\t\tif _, isOptionalField := option.(optionalFieldType); isOptionalField {\n\t\t\toptionalField = true\n\t\t}\n\t}\n\n\trv := reflect.Indirect(reflect.ValueOf(v))\n\tif !rv.CanAddr() {\n\t\treturn fmt.Errorf(\"can only decode to pointer type, got %T\", v)\n\t}\n\tt := rv.Type()\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"decode type\", typeField(\"type\", v), zap.Bool(\"optional\", optionalField))\n\t}\n\n\tif optionalField {\n\t\tvar isPresent byte\n\t\tif d.hasRemaining() {\n\t\t\tisPresent, err = d.ReadByte()\n\t\t\tif err != nil {\n\t\t\t\terr = fmt.Errorf(\"decode: %t isPresent, %s\", v, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tif isPresent == 0 {\n\t\t\tif traceEnabled {\n\t\t\t\tzlog.Debug(\"skipping optional\", typeField(\"type\", v))\n\t\t\t}\n\n\t\t\trv.Set(reflect.Zero(t))\n\t\t\treturn\n\t\t}\n\t}\n\n\tif t.Kind() == reflect.Ptr {\n\t\tt = t.Elem()\n\t\tnewRV := reflect.New(t)\n\t\trv.Set(newRV)\n\n\t\t\/\/ At this point, `newRV` is a pointer to our target type, we need to check here because\n\t\t\/\/ after that, when `reflect.Indirect` is used, we get a `**` instead of a `*`\n\t\t\/\/ which breaks the interface checking.\n\t\t\/\/\n\t\t\/\/ Ultimetaly, I think this could should be re-written, I don't think the `**` is necessary.\n\t\tif u, ok := newRV.Interface().(UnmarshalerBinary); ok {\n\t\t\tif traceEnabled {\n\t\t\t\tzlog.Debug(\"using UnmarshalBinary method to decode type\", typeField(\"type\", v))\n\t\t\t}\n\t\t\treturn u.UnmarshalBinary(d)\n\t\t}\n\n\t\trv = reflect.Indirect(newRV)\n\t} else {\n\t\t\/\/ We check if `v` directly is `UnmarshalerBinary` this is to overcome our bad code that\n\t\t\/\/ has problem dealing with non-pointer type, which should still be possible here, by allocating\n\t\t\/\/ the empty value for it can then unmarshalling using the address of it. See comment above about\n\t\t\/\/ `newRV` being turned into `**`.\n\t\t\/\/\n\t\t\/\/ We should re-code all the logic to determine the type and indirection using Golang `json` package\n\t\t\/\/ logic. 
See here: https:\/\/github.com\/golang\/go\/blob\/54697702e435bddb69c0b76b25b3209c78d2120a\/src\/encoding\/json\/decode.go#L439\n\t\tif u, ok := v.(UnmarshalerBinary); ok {\n\t\t\tif traceEnabled {\n\t\t\t\tzlog.Debug(\"using UnmarshalBinary method to decode type\", typeField(\"type\", v))\n\t\t\t}\n\t\t\treturn u.UnmarshalBinary(d)\n\t\t}\n\t}\n\n\tswitch v.(type) {\n\tcase *string:\n\t\ts, e := d.ReadString()\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\trv.SetString(s)\n\t\treturn\n\tcase *Name, *AccountName, *PermissionName, *ActionName, *TableName, *ScopeName:\n\t\tvar n uint64\n\t\tn, err = d.ReadUint64()\n\t\tname := NameToString(n)\n\t\tif traceEnabled {\n\t\t\tzlog.Debug(\"read name\", zap.String(\"name\", name))\n\t\t}\n\t\trv.SetString(name)\n\t\treturn\n\n\tcase *byte, *P2PMessageType, *TransactionStatus, *CompressionType, *IDListMode, *GoAwayReason:\n\t\tvar n byte\n\t\tn, err = d.ReadByte()\n\t\trv.SetUint(uint64(n))\n\t\treturn\n\tcase *int8:\n\t\tvar n int8\n\t\tn, err = d.ReadInt8()\n\t\trv.SetInt(int64(n))\n\t\treturn\n\tcase *int16:\n\t\tvar n int16\n\t\tn, err = d.ReadInt16()\n\t\trv.SetInt(int64(n))\n\t\treturn\n\tcase *int32:\n\t\tvar n int32\n\t\tn, err = d.ReadInt32()\n\t\trv.SetInt(int64(n))\n\t\treturn\n\tcase *int64:\n\t\tvar n int64\n\t\tn, err = d.ReadInt64()\n\t\trv.SetInt(int64(n))\n\t\treturn\n\tcase *Int64:\n\t\tvar n int64\n\t\tn, err = d.ReadInt64()\n\t\trv.SetInt(int64(n))\n\t\treturn\n\n\t\/\/ This is so hackish, doing it right now, but the decoder needs to handle those\n\t\/\/ case (a struct field that is itself a pointer) by itself.\n\tcase **Uint64:\n\t\tvar n uint64\n\t\tn, err = d.ReadUint64()\n\t\tif err == nil {\n\t\t\trv.Set(reflect.ValueOf((Uint64)(n)))\n\t\t}\n\n\t\treturn\n\tcase *Uint64:\n\t\tvar n uint64\n\t\tn, err = d.ReadUint64()\n\t\trv.SetUint(uint64(n))\n\t\treturn\n\tcase *Float64:\n\t\tvar n float64\n\t\tn, err = d.ReadFloat64()\n\t\trv.SetFloat(n)\n\t\treturn\n\tcase *Uint128:\n\t\tvar n Uint128\n\t\tn, err = d.ReadUint128(\"uint128\")\n\t\trv.Set(reflect.ValueOf(n))\n\t\treturn\n\tcase *Int128:\n\t\tvar n Uint128\n\t\tn, err = d.ReadUint128(\"int128\")\n\t\trv.Set(reflect.ValueOf(Int128(n)))\n\t\treturn\n\tcase *Float128:\n\t\tvar n Uint128\n\t\tn, err = d.ReadUint128(\"float128\")\n\t\trv.Set(reflect.ValueOf(Float128(n)))\n\t\treturn\n\tcase *uint16:\n\t\tvar n uint16\n\t\tn, err = d.ReadUint16()\n\t\trv.SetUint(uint64(n))\n\t\treturn\n\tcase *uint32:\n\t\tvar n uint32\n\t\tn, err = d.ReadUint32()\n\t\trv.SetUint(uint64(n))\n\t\treturn\n\tcase *uint64:\n\t\tvar n uint64\n\t\tn, err = d.ReadUint64()\n\t\trv.SetUint(n)\n\t\treturn\n\tcase *Varuint32:\n\t\tvar r uint64\n\t\tr, err = d.ReadUvarint64()\n\t\trv.SetUint(r)\n\t\treturn\n\tcase *bool:\n\t\tvar r bool\n\t\tr, err = d.ReadBool()\n\t\trv.SetBool(r)\n\t\treturn\n\tcase *Bool:\n\t\tvar r bool\n\t\tr, err = d.ReadBool()\n\t\trv.SetBool(r)\n\t\treturn\n\tcase *HexBytes:\n\t\tvar data []byte\n\t\tdata, err = d.ReadByteArray()\n\t\trv.SetBytes(data)\n\t\treturn\n\tcase *[]byte:\n\t\tvar data []byte\n\t\tdata, err = d.ReadByteArray()\n\t\trv.SetBytes(data)\n\t\treturn\n\tcase *Checksum256:\n\t\tvar s Checksum256\n\t\ts, err = d.ReadChecksum256()\n\t\trv.SetBytes(s)\n\t\treturn\n\tcase *ecc.PublicKey:\n\t\tvar p ecc.PublicKey\n\t\tp, err = d.ReadPublicKey()\n\t\trv.Set(reflect.ValueOf(p))\n\t\treturn\n\tcase *ecc.Signature:\n\t\tvar s ecc.Signature\n\t\ts, err = d.ReadSignature()\n\t\trv.Set(reflect.ValueOf(s))\n\t\treturn\n\tcase *Tstamp:\n\t\tvar ts Tstamp\n\t\tts, 
err = d.ReadTstamp()\n\t\trv.Set(reflect.ValueOf(ts))\n\t\treturn\n\tcase *TimePoint:\n\t\tvar tp TimePoint\n\t\ttp, err = d.ReadTimePoint()\n\t\trv.Set(reflect.ValueOf(tp))\n\t\treturn\n\tcase *TimePointSec:\n\t\tvar tp TimePointSec\n\t\ttp, err = d.ReadTimePointSec()\n\t\trv.Set(reflect.ValueOf(tp))\n\t\treturn\n\tcase *BlockTimestamp:\n\t\tvar bt BlockTimestamp\n\t\tbt, err = d.ReadBlockTimestamp()\n\t\trv.Set(reflect.ValueOf(bt))\n\t\treturn\n\tcase *JSONTime:\n\t\tvar jt JSONTime\n\t\tjt, err = d.ReadJSONTime()\n\t\trv.Set(reflect.ValueOf(jt))\n\t\treturn\n\tcase *CurrencyName:\n\t\tvar cur CurrencyName\n\t\tcur, err = d.ReadCurrencyName()\n\t\trv.Set(reflect.ValueOf(cur))\n\t\treturn\n\tcase *Symbol:\n\t\tvar symbol *Symbol\n\t\tsymbol, err = d.ReadSymbol()\n\t\trv.Set(reflect.ValueOf(*symbol))\n\t\treturn\n\tcase *Asset:\n\t\tvar asset Asset\n\t\tasset, err = d.ReadAsset()\n\t\trv.Set(reflect.ValueOf(asset))\n\t\treturn\n\tcase *TransactionWithID:\n\t\tt, e := d.ReadByte()\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"failed to read TransactionWithID type byte: %w\", e)\n\t\t\treturn\n\t\t}\n\n\t\tif traceEnabled {\n\t\t\tzlog.Debug(\"type byte value\", zap.Uint8(\"val\", t))\n\t\t}\n\n\t\tif t == 0 {\n\t\t\tid, e := d.ReadChecksum256()\n\t\t\tif err != nil {\n\t\t\t\terr = fmt.Errorf(\"failed to read TransactionWithID id: %w\", e)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttrx := TransactionWithID{ID: id}\n\t\t\trv.Set(reflect.ValueOf(trx))\n\t\t\treturn nil\n\n\t\t} else {\n\t\t\tpackedTrx := &PackedTransaction{}\n\t\t\tif err := d.Decode(packedTrx); err != nil {\n\t\t\t\treturn fmt.Errorf(\"packed transaction: %w\", err)\n\t\t\t}\n\n\t\t\tid, err := packedTrx.ID()\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"packed transaction id: %w\", err)\n\t\t\t}\n\n\t\t\ttrx := TransactionWithID{ID: id, Packed: packedTrx}\n\t\t\trv.Set(reflect.ValueOf(trx))\n\t\t\treturn nil\n\t\t}\n\n\tcase **Action:\n\t\terr = d.decodeStruct(v, t, rv)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\taction := rv.Interface().(Action)\n\n\t\tif d.decodeActions {\n\t\t\terr = d.ReadActionData(&action)\n\t\t}\n\n\t\trv.Set(reflect.ValueOf(action))\n\t\treturn\n\n\tcase *Packet:\n\n\t\tenvelope, e := d.ReadP2PMessageEnvelope()\n\t\tif e != nil {\n\t\t\terr = fmt.Errorf(\"decode, %s\", e)\n\t\t\treturn\n\t\t}\n\n\t\tif d.decodeP2PMessage {\n\t\t\tattr, ok := envelope.Type.reflectTypes()\n\t\t\tif !ok {\n\t\t\t\treturn fmt.Errorf(\"decode, unknown p2p message type [%d]\", envelope.Type)\n\t\t\t}\n\t\t\tmsg := reflect.New(attr.ReflectType)\n\t\t\tsubDecoder := NewDecoder(envelope.Payload)\n\n\t\t\terr = subDecoder.Decode(msg.Interface())\n\n\t\t\tdecoded := msg.Interface().(P2PMessage)\n\t\t\tenvelope.P2PMessage = decoded\n\t\t}\n\n\t\trv.Set(reflect.ValueOf(*envelope))\n\n\t\treturn\n\t}\n\n\tswitch t.Kind() {\n\tcase reflect.Array:\n\t\tif traceEnabled {\n\t\t\tzlog.Debug(\"reading array\")\n\t\t}\n\t\tlen := t.Len()\n\t\tfor i := 0; i < int(len); i++ {\n\t\t\tif err = d.Decode(rv.Index(i).Addr().Interface()); err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\treturn\n\n\tcase reflect.Slice:\n\t\tvar l uint64\n\t\tif l, err = d.ReadUvarint64(); err != nil {\n\t\t\treturn\n\t\t}\n\t\tif traceEnabled {\n\t\t\tzlog.Debug(\"reading slice\", zap.Uint64(\"len\", l), typeField(\"type\", v))\n\t\t}\n\t\trv.Set(reflect.MakeSlice(t, int(l), int(l)))\n\t\tfor i := 0; i < int(l); i++ {\n\t\t\tif err = d.Decode(rv.Index(i).Addr().Interface()); err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\tcase reflect.Struct:\n\n\t\terr = 
d.decodeStruct(v, t, rv)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\tdefault:\n\t\treturn errors.New(\"decode, unsupported type \" + t.String())\n\t}\n\n\treturn\n}\n\nfunc (d *Decoder) decodeStruct(v interface{}, t reflect.Type, rv reflect.Value) (err error) {\n\tl := rv.NumField()\n\n\tseenBinaryExtensionField := false\n\tfor i := 0; i < l; i++ {\n\t\tstructField := t.Field(i)\n\t\ttag := structField.Tag.Get(\"eos\")\n\t\tif tag == \"-\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tif tag != \"binary_extension\" && seenBinaryExtensionField {\n\t\t\tpanic(fmt.Sprintf(\"the `eos: \\\"binary_extension\\\"` tags must be packed together at the end of struct fields, problematic field %s\", structField.Name))\n\t\t}\n\n\t\tif tag == \"binary_extension\" {\n\t\t\tseenBinaryExtensionField = true\n\n\t\t\t\/\/ FIXME: This works only if what is in `d.data` is the actual full data buffer that\n\t\t\t\/\/ needs to be decoded. If there is for example two structs in the buffer, this\n\t\t\t\/\/ will not work as we would continue into the next struct.\n\t\t\t\/\/\n\t\t\t\/\/ But at the same time, does it make sense otherwise? What would be the inference\n\t\t\t\/\/ rule in the case of extra bytes available? Continue decoding and revert if it's\n\t\t\t\/\/ not working? But how to detect valid errors?\n\t\t\tif len(d.data[d.pos:]) <= 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tif v := rv.Field(i); v.CanSet() && structField.Name != \"_\" {\n\t\t\tvar options []DecodeOption\n\t\t\tif tag == \"optional\" {\n\t\t\t\toptions = append(options, OptionalField)\n\t\t\t}\n\n\t\t\tvalue := v.Addr().Interface()\n\n\t\t\tif traceEnabled {\n\t\t\t\tzlog.Debug(\"struct field\", typeField(structField.Name, value), zap.String(\"tag\", tag))\n\t\t\t}\n\n\t\t\tif err = d.Decode(value, options...); err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}\n\nvar ErrVarIntBufferSize = errors.New(\"varint: invalid buffer size\")\n\nfunc (d *Decoder) ReadUvarint64() (uint64, error) {\n\tl, read := binary.Uvarint(d.data[d.pos:])\n\tif read <= 0 {\n\t\treturn l, ErrVarIntBufferSize\n\t}\n\tif traceEnabled {\n\t\tzlog.Debug(\"read uvarint64\", zap.Uint64(\"val\", l))\n\t}\n\td.pos += read\n\treturn l, nil\n}\nfunc (d *Decoder) ReadVarint64() (out int64, err error) {\n\tl, read := binary.Varint(d.data[d.pos:])\n\tif read <= 0 {\n\t\treturn l, ErrVarIntBufferSize\n\t}\n\tif traceEnabled {\n\t\tzlog.Debug(\"read varint\", zap.Int64(\"val\", l))\n\t}\n\td.pos += read\n\treturn l, nil\n}\n\nfunc (d *Decoder) ReadVarint32() (out int32, err error) {\n\tn, err := d.ReadVarint64()\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tout = int32(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read varint32\", zap.Int32(\"val\", out))\n\t}\n\treturn\n}\nfunc (d *Decoder) ReadUvarint32() (out uint32, err error) {\n\n\tn, err := d.ReadUvarint64()\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tout = uint32(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read uvarint32\", zap.Uint32(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadByteArray() (out []byte, err error) {\n\n\tl, err := d.ReadUvarint64()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(d.data) < d.pos+int(l) {\n\t\treturn nil, fmt.Errorf(\"byte array: varlen=%d, missing %d bytes\", l, d.pos+int(l)-len(d.data))\n\t}\n\n\tout = d.data[d.pos : d.pos+int(l)]\n\td.pos += int(l)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read byte array\", zap.Stringer(\"hex\", HexBytes(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadByte() (out byte, err error) {\n\tif d.remaining() < TypeSize.Byte {\n\t\terr 
= fmt.Errorf(\"required [1] byte, remaining [%d]\", d.remaining())\n\t\treturn\n\t}\n\n\tout = d.data[d.pos]\n\td.pos++\n\tif traceEnabled {\n\t\tzlog.Debug(\"read byte\", zap.Uint8(\"byte\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadBool() (out bool, err error) {\n\tif d.remaining() < TypeSize.Bool {\n\t\terr = fmt.Errorf(\"bool required [%d] byte, remaining [%d]\", TypeSize.Bool, d.remaining())\n\t\treturn\n\t}\n\n\tb, err := d.ReadByte()\n\n\tif err != nil {\n\t\terr = fmt.Errorf(\"readBool, %s\", err)\n\t}\n\tout = b != 0\n\tif traceEnabled {\n\t\tzlog.Debug(\"read bool\", zap.Bool(\"val\", out))\n\t}\n\treturn\n\n}\n\nfunc (d *Decoder) ReadUint8() (out uint8, err error) {\n\tout, err = d.ReadByte()\n\treturn\n}\n\n\/\/ Deprecated: Use `ReadUint8` (with a lower case `i`) instead\nfunc (d *Decoder) ReadUInt8() (out uint8, err error) {\n\treturn d.ReadUint8()\n}\n\nfunc (d *Decoder) ReadInt8() (out int8, err error) {\n\tb, err := d.ReadByte()\n\tout = int8(b)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read int8\", zap.Int8(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadUint16() (out uint16, err error) {\n\tif d.remaining() < TypeSize.Uint16 {\n\t\terr = fmt.Errorf(\"uint16 required [%d] bytes, remaining [%d]\", TypeSize.Uint16, d.remaining())\n\t\treturn\n\t}\n\n\tout = binary.LittleEndian.Uint16(d.data[d.pos:])\n\td.pos += TypeSize.Uint16\n\tif traceEnabled {\n\t\tzlog.Debug(\"read uint16\", zap.Uint16(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadInt16() (out int16, err error) {\n\tn, err := d.ReadUint16()\n\tout = int16(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read int16\", zap.Int16(\"val\", out))\n\t}\n\treturn\n}\nfunc (d *Decoder) ReadInt64() (out int64, err error) {\n\tn, err := d.ReadUint64()\n\tout = int64(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read int64\", zap.Int64(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadUint32() (out uint32, err error) {\n\tif d.remaining() < TypeSize.Uint32 {\n\t\terr = fmt.Errorf(\"uint32 required [%d] bytes, remaining [%d]\", TypeSize.Uint32, d.remaining())\n\t\treturn\n\t}\n\n\tout = binary.LittleEndian.Uint32(d.data[d.pos:])\n\td.pos += TypeSize.Uint32\n\tif traceEnabled {\n\t\tzlog.Debug(\"read uint32\", zap.Uint32(\"val\", out))\n\t}\n\treturn\n}\nfunc (d *Decoder) ReadInt32() (out int32, err error) {\n\tn, err := d.ReadUint32()\n\tout = int32(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read int32\", zap.Int32(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadUint64() (out uint64, err error) {\n\tif d.remaining() < TypeSize.Uint64 {\n\t\terr = fmt.Errorf(\"uint64 required [%d] bytes, remaining [%d]\", TypeSize.Uint64, d.remaining())\n\t\treturn\n\t}\n\n\tdata := d.data[d.pos : d.pos+TypeSize.Uint64]\n\tout = binary.LittleEndian.Uint64(data)\n\td.pos += TypeSize.Uint64\n\tif traceEnabled {\n\t\tzlog.Debug(\"read uint64\", zap.Uint64(\"val\", out), zap.Stringer(\"hex\", HexBytes(data)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadInt128() (out Int128, err error) {\n\tv, err := d.ReadUint128(\"int128\")\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn Int128(v), nil\n}\n\nfunc (d *Decoder) ReadUint128(typeName string) (out Uint128, err error) {\n\tif d.remaining() < TypeSize.Uint128 {\n\t\terr = fmt.Errorf(\"%s required [%d] bytes, remaining [%d]\", typeName, TypeSize.Uint128, d.remaining())\n\t\treturn\n\t}\n\n\tdata := d.data[d.pos : d.pos+TypeSize.Uint128]\n\tout.Lo = binary.LittleEndian.Uint64(data)\n\tout.Hi = binary.LittleEndian.Uint64(data[8:])\n\n\td.pos += TypeSize.Uint128\n\tif traceEnabled 
{\n\t\tzlog.Debug(\"read uint128\", zap.Stringer(\"hex\", out), zap.Uint64(\"hi\", out.Hi), zap.Uint64(\"lo\", out.Lo))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadFloat32() (out float32, err error) {\n\tif d.remaining() < TypeSize.Float32 {\n\t\terr = fmt.Errorf(\"float32 required [%d] bytes, remaining [%d]\", TypeSize.Float32, d.remaining())\n\t\treturn\n\t}\n\n\tn := binary.LittleEndian.Uint32(d.data[d.pos:])\n\tout = math.Float32frombits(n)\n\td.pos += TypeSize.Float32\n\tif traceEnabled {\n\t\tzlog.Debug(\"read float32\", zap.Float32(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadNodeosFloat32() (out float32, err error) {\n\tif d.remaining() < TypeSize.Float32 {\n\t\terr = fmt.Errorf(\"float32 required [%d] bytes, remaining [%d]\", TypeSize.Float32, d.remaining())\n\t\treturn\n\t}\n\n\tn := binary.LittleEndian.Uint32(d.data[d.pos:])\n\tout = math.Float32frombits(n)\n\td.pos += TypeSize.Float32\n\tif traceEnabled {\n\t\tzlog.Debug(\"read float32\", zap.Float32(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadFloat64() (out float64, err error) {\n\tif d.remaining() < TypeSize.Float64 {\n\t\terr = fmt.Errorf(\"float64 required [%d] bytes, remaining [%d]\", TypeSize.Float64, d.remaining())\n\t\treturn\n\t}\n\n\tn := binary.LittleEndian.Uint64(d.data[d.pos:])\n\tout = math.Float64frombits(n)\n\td.pos += TypeSize.Float64\n\tif traceEnabled {\n\t\tzlog.Debug(\"read Float64\", zap.Float64(\"val\", float64(out)))\n\t}\n\treturn\n}\n\nfunc fixUtf(r rune) rune {\n\tif r == utf8.RuneError {\n\t\treturn '\ufffd'\n\t}\n\treturn r\n}\nfunc (d *Decoder) SafeReadUTF8String() (out string, err error) {\n\tdata, err := d.ReadByteArray()\n\tout = strings.Map(fixUtf, string(data))\n\tif traceEnabled {\n\t\tzlog.Debug(\"read safe UTF8 string\", zap.String(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadString() (out string, err error) {\n\tdata, err := d.ReadByteArray()\n\tout = string(data)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read string\", zap.String(\"val\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadChecksum160() (out Checksum160, err error) {\n\tif d.remaining() < TypeSize.Checksum160 {\n\t\terr = fmt.Errorf(\"checksum 160 required [%d] bytes, remaining [%d]\", TypeSize.Checksum160, d.remaining())\n\t\treturn\n\t}\n\n\tout = make(Checksum160, TypeSize.Checksum160)\n\tcopy(out, d.data[d.pos:d.pos+TypeSize.Checksum160])\n\td.pos += TypeSize.Checksum160\n\tif traceEnabled {\n\t\tzlog.Debug(\"read checksum160\", zap.Stringer(\"hex\", HexBytes(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadChecksum256() (out Checksum256, err error) {\n\tif d.remaining() < TypeSize.Checksum256 {\n\t\terr = fmt.Errorf(\"checksum 256 required [%d] bytes, remaining [%d]\", TypeSize.Checksum256, d.remaining())\n\t\treturn\n\t}\n\n\tout = make(Checksum256, TypeSize.Checksum256)\n\tcopy(out, d.data[d.pos:d.pos+TypeSize.Checksum256])\n\td.pos += TypeSize.Checksum256\n\tif traceEnabled {\n\t\tzlog.Debug(\"read checksum256\", zap.Stringer(\"hex\", HexBytes(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadChecksum512() (out Checksum512, err error) {\n\tif d.remaining() < TypeSize.Checksum512 {\n\t\terr = fmt.Errorf(\"checksum 512 required [%d] bytes, remaining [%d]\", TypeSize.Checksum512, d.remaining())\n\t\treturn\n\t}\n\n\tout = make(Checksum512, TypeSize.Checksum512)\n\tcopy(out, d.data[d.pos:d.pos+TypeSize.Checksum512])\n\td.pos += TypeSize.Checksum512\n\tif traceEnabled {\n\t\tzlog.Debug(\"read checksum512\", zap.Stringer(\"hex\", HexBytes(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) 
ReadPublicKey() (out ecc.PublicKey, err error) {\n\ttypeID, err := d.ReadUint8()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read public key type: %w\", err)\n\t}\n\n\tcurveID := ecc.CurveID(typeID)\n\tvar keyMaterial []byte\n\n\tif curveID == ecc.CurveK1 || curveID == ecc.CurveR1 {\n\t\t\/\/ Minus 1 because we already read the curveID which is 1 out of the 34 bytes of a full \"legacy\" PublicKey\n\t\tkeyMaterial, err = d.readPublicKeyMaterial(curveID, TypeSize.PublicKey-1)\n\t} else if curveID == ecc.CurveWA {\n\t\tkeyMaterial, err = d.readWAPublicKeyMaterial()\n\t} else {\n\t\terr = fmt.Errorf(\"unsupported curve ID %d (%s)\", uint8(curveID), curveID)\n\t}\n\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read public key material for curve %s: %w\", curveID, err)\n\t}\n\n\tdata := append([]byte{byte(curveID)}, keyMaterial...)\n\tout, err = ecc.NewPublicKeyFromData(data)\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"new public key from data: %w\", err)\n\t}\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"read public key\", zap.Stringer(\"pubkey\", out))\n\t}\n\n\treturn\n}\n\nfunc (d *Decoder) readPublicKeyMaterial(curveID ecc.CurveID, keyMaterialSize int) (out []byte, err error) {\n\tif d.remaining() < keyMaterialSize {\n\t\terr = fmt.Errorf(\"publicKey %s key material requires [%d] bytes, remaining [%d]\", curveID, keyMaterialSize, d.remaining())\n\t\treturn\n\t}\n\n\tout = make([]byte, keyMaterialSize)\n\tcopy(out, d.data[d.pos:d.pos+keyMaterialSize])\n\td.pos += keyMaterialSize\n\n\treturn\n}\n\nfunc (d *Decoder) readWAPublicKeyMaterial() (out []byte, err error) {\n\tbegin := d.pos\n\tif d.remaining() < 35 {\n\t\terr = fmt.Errorf(\"publicKey WA key material requires at least [35] bytes, remaining [%d]\", d.remaining())\n\t\treturn\n\t}\n\n\td.pos += 34\n\tremainderDataSize, err := d.ReadUvarint32()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read public key WA key material size: %w\", err)\n\t}\n\n\tif d.remaining() < int(remainderDataSize) {\n\t\terr = fmt.Errorf(\"publicKey WA remainder key material requires [%d] bytes, remaining [%d]\", remainderDataSize, d.remaining())\n\t\treturn\n\t}\n\n\td.pos += int(remainderDataSize)\n\tkeyMaterialSize := d.pos - begin\n\n\tout = make([]byte, keyMaterialSize)\n\tcopy(out, d.data[begin:begin+keyMaterialSize])\n\n\treturn\n}\n\nfunc (d *Decoder) ReadSignature() (out ecc.Signature, err error) {\n\ttypeID, err := d.ReadUint8()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read signature type: %w\", err)\n\t}\n\n\tcurveID := ecc.CurveID(typeID)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read signature curve id\", zap.Stringer(\"curve\", curveID))\n\t}\n\n\tvar data []byte\n\tif curveID == ecc.CurveK1 || curveID == ecc.CurveR1 {\n\t\t\/\/ Minus 1 because we already read the curveID which is 1 out of the 34 bytes of a full \"legacy\" PublicKey\n\t\tif d.remaining() < TypeSize.Signature-1 {\n\t\t\treturn out, fmt.Errorf(\"signature required [%d] bytes, remaining [%d]\", TypeSize.Signature-1, d.remaining())\n\t\t}\n\n\t\tdata = make([]byte, 66)\n\t\tdata[0] = byte(curveID)\n\t\tcopy(data[1:], d.data[d.pos:d.pos+TypeSize.Signature-1])\n\t\tif traceEnabled {\n\t\t\tzlog.Debug(\"read signature data\", zap.Stringer(\"data\", HexBytes(data)))\n\t\t}\n\n\t\td.pos += TypeSize.Signature - 1\n\t} else if curveID == ecc.CurveWA {\n\t\tdata, err = d.readWASignatureData()\n\t\tif err != nil {\n\t\t\treturn out, fmt.Errorf(\"unable to read WA signature: %w\", err)\n\t\t}\n\t} else {\n\t\treturn out, 
fmt.Errorf(\"unsupported curve ID %d (%s)\", uint8(curveID), curveID)\n\t}\n\n\tout, err = ecc.NewSignatureFromData(data)\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"new signature: %w\", err)\n\t}\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"read signature\", zap.Stringer(\"sig\", out))\n\t}\n\n\treturn\n}\n\nfunc (d *Decoder) readWASignatureData() (out []byte, err error) {\n\tbegin := d.pos\n\tif d.remaining() < 66 {\n\t\terr = fmt.Errorf(\"signature WA key material requires at least [66] bytes, remaining [%d]\", d.remaining())\n\t\treturn\n\t}\n\n\t\/\/ Skip key recover param id (1 byte), R value (32 bytes) and S value (32 bytes)\n\td.pos += 65\n\tauthenticatorDataSize, err := d.ReadUvarint32()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read signature WA authenticator data size: %w\", err)\n\t}\n\n\tif d.remaining() < int(authenticatorDataSize) {\n\t\terr = fmt.Errorf(\"signature WA authenticator data requires [%d] bytes, remaining [%d]\", authenticatorDataSize, d.remaining())\n\t\treturn\n\t}\n\td.pos += int(authenticatorDataSize)\n\n\tclientDataJSONSize, err := d.ReadUvarint32()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"unable to read signature WA client data JSON size: %w\", err)\n\t}\n\n\tif d.remaining() < int(clientDataJSONSize) {\n\t\terr = fmt.Errorf(\"signature WA client data JSON requires [%d] bytes, remaining [%d]\", clientDataJSONSize, d.remaining())\n\t\treturn\n\t}\n\td.pos += int(clientDataJSONSize)\n\n\tsignatureMaterialSize := d.pos - begin\n\n\tout = make([]byte, signatureMaterialSize+1)\n\tout[0] = byte(ecc.CurveWA)\n\tcopy(out[1:], d.data[begin:begin+signatureMaterialSize])\n\tif traceEnabled {\n\t\tzlog.Debug(\"read wa signature data\", zap.Stringer(\"data\", HexBytes(out)))\n\t}\n\n\treturn\n}\n\nfunc (d *Decoder) ReadTstamp() (out Tstamp, err error) {\n\tif d.remaining() < TypeSize.Tstamp {\n\t\terr = fmt.Errorf(\"tstamp required [%d] bytes, remaining [%d]\", TypeSize.Tstamp, d.remaining())\n\t\treturn\n\t}\n\n\tunixNano, err := d.ReadUint64()\n\tout.Time = time.Unix(0, int64(unixNano))\n\tif traceEnabled {\n\t\tzlog.Debug(\"read tstamp\", zap.Time(\"time\", out.Time))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadBlockTimestamp() (out BlockTimestamp, err error) {\n\tif d.remaining() < TypeSize.BlockTimestamp {\n\t\terr = fmt.Errorf(\"blockTimestamp required [%d] bytes, remaining [%d]\", TypeSize.BlockTimestamp, d.remaining())\n\t\treturn\n\t}\n\n\t\/\/ Encoded value of block timestamp is the slot, which represents the amount of 500 ms that\n\t\/\/ has elapsed since block epoch which is Januaray 1st, 2000 (946684800000 Unix Timestamp Milliseconds)\n\tn, err := d.ReadUint32()\n\tmilliseconds := int64(n)*500 + 946684800000\n\n\tout.Time = time.Unix(0, milliseconds*1000*1000)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read block timestamp\", zap.Time(\"time\", out.Time))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadTimePoint() (out TimePoint, err error) {\n\tn, err := d.ReadUint64()\n\tout = TimePoint(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read TimePoint\", zap.Uint64(\"us\", uint64(out)))\n\t}\n\treturn\n\n}\nfunc (d *Decoder) ReadTimePointSec() (out TimePointSec, err error) {\n\tn, err := d.ReadUint32()\n\tout = TimePointSec(n)\n\tif traceEnabled {\n\t\tzlog.Debug(\"read TimePointSec\", zap.Uint32(\"secs\", uint32(out)))\n\t}\n\treturn\n\n}\n\nfunc (d *Decoder) ReadJSONTime() (jsonTime JSONTime, err error) {\n\tn, err := d.ReadUint32()\n\tjsonTime = JSONTime{time.Unix(int64(n), 0).UTC()}\n\tif traceEnabled {\n\t\tzlog.Debug(\"read json time\", 
zap.Time(\"time\", jsonTime.Time))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadName() (out Name, err error) {\n\tn, err := d.ReadUint64()\n\tout = Name(NameToString(n))\n\tif traceEnabled {\n\t\tzlog.Debug(\"read name\", zap.String(\"name\", string(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadCurrencyName() (out CurrencyName, err error) {\n\tdata := d.data[d.pos : d.pos+TypeSize.CurrencyName]\n\td.pos += TypeSize.CurrencyName\n\tout = CurrencyName(strings.TrimRight(string(data), \"\\x00\"))\n\tif traceEnabled {\n\t\tzlog.Debug(\"read currency name\", zap.String(\"name\", string(out)))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadAsset() (out Asset, err error) {\n\n\tamount, err := d.ReadInt64()\n\tprecision, err := d.ReadByte()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"readSymbol precision, %s\", err)\n\t}\n\n\tif d.remaining() < 7 {\n\t\terr = fmt.Errorf(\"asset symbol required [%d] bytes, remaining [%d]\", 7, d.remaining())\n\t\treturn\n\t}\n\n\tdata := d.data[d.pos : d.pos+7]\n\td.pos += 7\n\n\tout = Asset{}\n\tout.Amount = Int64(amount)\n\tout.Precision = precision\n\tout.Symbol.Symbol = strings.TrimRight(string(data), \"\\x00\")\n\tif traceEnabled {\n\t\tzlog.Debug(\"read asset\", zap.Stringer(\"value\", out))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadExtendedAsset() (out ExtendedAsset, err error) {\n\tasset, err := d.ReadAsset()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"read extended asset: read asset: %w\", err)\n\t}\n\n\tcontract, err := d.ReadName()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"read extended asset: read name: %w\", err)\n\t}\n\n\textendedAsset := ExtendedAsset{\n\t\tAsset: asset,\n\t\tContract: AccountName(contract),\n\t}\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"read extended asset\")\n\t}\n\n\treturn extendedAsset, err\n}\n\nfunc (d *Decoder) ReadSymbol() (out *Symbol, err error) {\n\trawValue, err := d.ReadUint64()\n\tif err != nil {\n\t\treturn out, fmt.Errorf(\"read symbol: %w\", err)\n\t}\n\n\tprecision := uint8(rawValue & 0xFF)\n\tsymbolCode := SymbolCode(rawValue >> 8).String()\n\n\tout = &Symbol{\n\t\tPrecision: precision,\n\t\tSymbol: symbolCode,\n\t}\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"read symbol\", zap.String(\"symbol\", symbolCode), zap.Uint8(\"precision\", precision))\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadSymbolCode() (out SymbolCode, err error) {\n\tn, err := d.ReadUint64()\n\tout = SymbolCode(n)\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"read symbol code\")\n\t}\n\treturn\n}\n\nfunc (d *Decoder) ReadActionData(action *Action) (err error) {\n\tactionMap := RegisteredActions[action.Account]\n\n\tvar decodeInto reflect.Type\n\tif actionMap != nil {\n\n\t\tobjType := actionMap[action.Name]\n\t\tif objType != nil {\n\t\t\tif traceEnabled {\n\t\t\t\tzlog.Debug(\"read object\", zap.String(\"type\", objType.Name()))\n\t\t\t}\n\t\t\tdecodeInto = objType\n\t\t}\n\t}\n\tif decodeInto == nil {\n\t\treturn\n\t}\n\n\tif traceEnabled {\n\t\tzlog.Debug(\"reflect type\", zap.String(\"type\", decodeInto.Name()))\n\t}\n\tobj := reflect.New(decodeInto)\n\tiface := obj.Interface()\n\tif traceEnabled {\n\t\tzlog.Debug(\"reflect object\", typeField(\"type\", iface), zap.Reflect(\"obj\", obj))\n\t}\n\terr = UnmarshalBinary(action.ActionData.HexData, iface)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"decoding Action [%s], %s\", obj.Type().Name(), err)\n\t}\n\n\taction.ActionData.Data = iface\n\n\treturn\n}\n\nfunc (d *Decoder) ReadP2PMessageEnvelope() (out *Packet, err error) {\n\n\tout = &Packet{}\n\tl, err := d.ReadUint32()\n\tif err != nil 
{\n\t\terr = fmt.Errorf(\"p2p envelope length: %w\", err)\n\t\treturn\n\t}\n\tout.Length = l\n\tb, err := d.ReadByte()\n\tif err != nil {\n\t\terr = fmt.Errorf(\"p2p envelope type: %w\", err)\n\t\treturn\n\t}\n\tout.Type = P2PMessageType(b)\n\n\tpayloadLength := int(l - 1)\n\tif d.remaining() < payloadLength {\n\t\terr = fmt.Errorf(\"p2p envelope payload required [%d] bytes, remaining [%d]\", l, d.remaining())\n\t\treturn\n\t}\n\n\tout.Payload = make([]byte, int(payloadLength))\n\tcopy(out.Payload, d.data[d.pos:d.pos+int(payloadLength)])\n\n\td.pos += int(out.Length)\n\treturn\n}\n\nfunc (d *Decoder) remaining() int {\n\treturn len(d.data) - d.pos\n}\n\nfunc (d *Decoder) hasRemaining() bool {\n\treturn d.remaining() > 0\n}\n\nfunc UnmarshalBinaryReader(reader io.Reader, v interface{}) (err error) {\n\tdata, err := ioutil.ReadAll(reader)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn UnmarshalBinary(data, v)\n}\n\nfunc UnmarshalBinary(data []byte, v interface{}) (err error) {\n\tdecoder := NewDecoder(data)\n\treturn decoder.Decode(v)\n}\n","avg_line_length":23.8899923606,"max_line_length":153,"alphanum_fraction":0.6666986442} +{"size":2598,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"context\"\n\t\"encoding\/json\"\n\t\"fmt\"\n\t\"net\/url\"\n\t\"os\"\n\t\"time\"\n\n\t\"github.com\/ChimeraCoder\/anaconda\"\n\t\"github.com\/pkg\/errors\"\n\t\"google.golang.org\/appengine\/memcache\"\n\t\"google.golang.org\/appengine\/urlfetch\"\n)\n\nfunc twAPI(ctx context.Context) *anaconda.TwitterApi {\n\tapi := anaconda.NewTwitterApiWithCredentials(\n\t\tos.Getenv(\"TWITTER_ACCESS_TOKEN\"),\n\t\tos.Getenv(\"TWITTER_ACCESS_TOKEN_SECRET\"),\n\t\tos.Getenv(\"TWITTER_CONSUMER_KEY\"),\n\t\tos.Getenv(\"TWITTER_CONSUMER_SECRET\"))\n\tapi.HttpClient.Transport = &urlfetch.Transport{Context: ctx}\n\treturn api\n}\n\nfunc getTweetsFromMemcache(ctx context.Context, cacheID string) ([]anaconda.Tweet, error) {\n\titem, err := memcache.Get(ctx, cacheID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar tweets []anaconda.Tweet\n\terr = json.Unmarshal(item.Value, &tweets)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to unmarshal cache data\")\n\t}\n\treturn tweets, nil\n}\n\nfunc getTweetsFromTwitter(ctx context.Context, listID int64) ([]anaconda.Tweet, error) {\n\tapi := twAPI(ctx)\n\ttweets, err := api.GetListTweets(listID, false, url.Values{\"count\": []string{\"100\"}})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not get tweets from twitter\")\n\t}\n\treturn tweets, nil\n}\n\nfunc updateMemcacheTweets(ctx context.Context, listID int64, cacheID string, expiration time.Duration) ([]anaconda.Tweet, error) {\n\ttweets, err := getTweetsFromMemcache(ctx, cacheID)\n\tif err == nil {\n\t\t\/\/ already updated\n\t\treturn tweets, nil\n\t}\n\tif err != memcache.ErrCacheMiss {\n\t\treturn nil, err\n\t}\n\n\ttweets, err = getTweetsFromTwitter(ctx, listID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tb, err := json.Marshal(tweets)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to marshal tweets\")\n\t}\n\n\terr = memcache.Set(ctx, &memcache.Item{\n\t\tKey: cacheID,\n\t\tValue: b,\n\t\tExpiration: expiration,\n\t})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to write tweet caches to memcache\")\n\t}\n\treturn tweets, nil\n}\n\nfunc getTweets(ctx context.Context, listID int64, expiration time.Duration) ([]anaconda.Tweet, error) {\n\tcacheID := fmt.Sprintf(\"list%d\", listID)\n\ttweets, err := getTweetsFromMemcache(ctx, 
cacheID)\n\tif err == nil {\n\t\treturn tweets, nil\n\t}\n\tif err != nil && err != memcache.ErrCacheMiss {\n\t\treturn nil, errors.Wrap(err, \"failed to get tweets from cache\")\n\t}\n\tvar err2 error\n\terr = transaction(ctx, cacheID+\"-mutex\", func() {\n\t\ttweets, err2 = updateMemcacheTweets(ctx, listID, cacheID, expiration)\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err2 != nil {\n\t\treturn nil, errors.Wrap(err2, \"failed to update tweet cache\")\n\t}\n\treturn tweets, nil\n}\n","avg_line_length":25.7227722772,"max_line_length":130,"alphanum_fraction":0.7001539646} +{"size":61532,"ext":"go","lang":"Go","max_stars_count":1139.0,"content":"\/\/ Code generated by protoc-gen-validate. DO NOT EDIT.\n\/\/ source: envoy\/extensions\/filters\/network\/http_connection_manager\/v3\/http_connection_manager.proto\n\npackage envoy_extensions_filters_network_http_connection_manager_v3\n\nimport (\n\t\"bytes\"\n\t\"errors\"\n\t\"fmt\"\n\t\"net\"\n\t\"net\/mail\"\n\t\"net\/url\"\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\t\"unicode\/utf8\"\n\n\t\"google.golang.org\/protobuf\/types\/known\/anypb\"\n)\n\n\/\/ ensure the imports are used\nvar (\n\t_ = bytes.MinRead\n\t_ = errors.New(\"\")\n\t_ = fmt.Print\n\t_ = utf8.UTFMax\n\t_ = (*regexp.Regexp)(nil)\n\t_ = (*strings.Reader)(nil)\n\t_ = net.IPv4len\n\t_ = time.Duration(0)\n\t_ = (*url.URL)(nil)\n\t_ = (*mail.Address)(nil)\n\t_ = anypb.Any{}\n)\n\n\/\/ Validate checks the field values on HttpConnectionManager with the rules\n\/\/ defined in the proto definition for this message. If any rules are\n\/\/ violated, an error is returned.\nfunc (m *HttpConnectionManager) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif _, ok := HttpConnectionManager_CodecType_name[int32(m.GetCodecType())]; !ok {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"CodecType\",\n\t\t\treason: \"value must be one of the defined enum values\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetStatPrefix()) < 1 {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"StatPrefix\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetHttpFilters() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"HttpFilters[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetAddUserAgent()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"AddUserAgent\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetTracing()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"Tracing\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetCommonHttpProtocolOptions()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"CommonHttpProtocolOptions\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := 
interface{}(m.GetHttpProtocolOptions()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"HttpProtocolOptions\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetHttp2ProtocolOptions()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"Http2ProtocolOptions\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetHttp3ProtocolOptions()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"Http3ProtocolOptions\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif !_HttpConnectionManager_ServerName_Pattern.MatchString(m.GetServerName()) {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"ServerName\",\n\t\t\treason: \"value does not match regex pattern \\\"^[^\\\\x00\\\\n\\\\r]*$\\\"\",\n\t\t}\n\t}\n\n\tif _, ok := HttpConnectionManager_ServerHeaderTransformation_name[int32(m.GetServerHeaderTransformation())]; !ok {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"ServerHeaderTransformation\",\n\t\t\treason: \"value must be one of the defined enum values\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetSchemeHeaderTransformation()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"SchemeHeaderTransformation\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif wrapper := m.GetMaxRequestHeadersKb(); wrapper != nil {\n\n\t\tif val := wrapper.GetValue(); val <= 0 || val > 8192 {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"MaxRequestHeadersKb\",\n\t\t\t\treason: \"value must be inside range (0, 8192]\",\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetStreamIdleTimeout()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"StreamIdleTimeout\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetRequestTimeout()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"RequestTimeout\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif d := m.GetRequestHeadersTimeout(); d != nil {\n\t\tdur, err := d.AsDuration(), d.CheckValid()\n\t\tif err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"RequestHeadersTimeout\",\n\t\t\t\treason: \"value is not a valid duration\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\n\t\tgte := time.Duration(0*time.Second + 0*time.Nanosecond)\n\n\t\tif dur < gte {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"RequestHeadersTimeout\",\n\t\t\t\treason: \"value must be greater than or equal to 0s\",\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetDrainTimeout()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn 
HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"DrainTimeout\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetDelayedCloseTimeout()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"DelayedCloseTimeout\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetAccessLog() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"AccessLog[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUseRemoteAddress()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"UseRemoteAddress\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for XffNumTrustedHops\n\n\tfor idx, item := range m.GetOriginalIpDetectionExtensions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"OriginalIpDetectionExtensions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetInternalAddressConfig()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"InternalAddressConfig\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for SkipXffAppend\n\n\tif !_HttpConnectionManager_Via_Pattern.MatchString(m.GetVia()) {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"Via\",\n\t\t\treason: \"value does not match regex pattern \\\"^[^\\\\x00\\\\n\\\\r]*$\\\"\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetGenerateRequestId()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"GenerateRequestId\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for PreserveExternalRequestId\n\n\t\/\/ no validation rules for AlwaysSetRequestIdInResponse\n\n\tif _, ok := HttpConnectionManager_ForwardClientCertDetails_name[int32(m.GetForwardClientCertDetails())]; !ok {\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"ForwardClientCertDetails\",\n\t\t\treason: \"value must be one of the defined enum values\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetSetCurrentClientCertDetails()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"SetCurrentClientCertDetails\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for Proxy_100Continue\n\n\t\/\/ no validation rules for 
RepresentIpv4RemoteAddressAsIpv4MappedIpv6\n\n\tfor idx, item := range m.GetUpgradeConfigs() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"UpgradeConfigs[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetNormalizePath()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"NormalizePath\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for MergeSlashes\n\n\t\/\/ no validation rules for PathWithEscapedSlashesAction\n\n\tif v, ok := interface{}(m.GetRequestIdExtension()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"RequestIdExtension\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetLocalReplyConfig()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"LocalReplyConfig\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for StripMatchingHostPort\n\n\tif v, ok := interface{}(m.GetStreamErrorOnInvalidHttpMessage()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"StreamErrorOnInvalidHttpMessage\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetPathNormalizationOptions()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"PathNormalizationOptions\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for StripTrailingHostDot\n\n\tif v, ok := interface{}(m.GetHiddenEnvoyDeprecatedIdleTimeout()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\tfield: \"HiddenEnvoyDeprecatedIdleTimeout\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tswitch m.RouteSpecifier.(type) {\n\n\tcase *HttpConnectionManager_Rds:\n\n\t\tif v, ok := interface{}(m.GetRds()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: \"Rds\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *HttpConnectionManager_RouteConfig:\n\n\t\tif v, ok := interface{}(m.GetRouteConfig()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: \"RouteConfig\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *HttpConnectionManager_ScopedRoutes:\n\n\t\tif v, ok := 
interface{}(m.GetScopedRoutes()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManagerValidationError{\n\t\t\t\t\tfield: \"ScopedRoutes\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tdefault:\n\t\treturn HttpConnectionManagerValidationError{\n\t\t\tfield: \"RouteSpecifier\",\n\t\t\treason: \"value is required\",\n\t\t}\n\n\t}\n\n\tswitch m.StripPortMode.(type) {\n\n\tcase *HttpConnectionManager_StripAnyHostPort:\n\t\t\/\/ no validation rules for StripAnyHostPort\n\n\t}\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManagerValidationError is the validation error returned by\n\/\/ HttpConnectionManager.Validate if the designated constraints aren't met.\ntype HttpConnectionManagerValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManagerValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManagerValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManagerValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManagerValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManagerValidationError) ErrorName() string {\n\treturn \"HttpConnectionManagerValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManagerValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManagerValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManagerValidationError{}\n\nvar _HttpConnectionManager_ServerName_Pattern = regexp.MustCompile(\"^[^\\x00\\n\\r]*$\")\n\nvar _HttpConnectionManager_Via_Pattern = regexp.MustCompile(\"^[^\\x00\\n\\r]*$\")\n\n\/\/ Validate checks the field values on LocalReplyConfig with the rules defined\n\/\/ in the proto definition for this message. 
If any rules are violated, an\n\/\/ error is returned.\nfunc (m *LocalReplyConfig) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tfor idx, item := range m.GetMappers() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn LocalReplyConfigValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Mappers[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetBodyFormat()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn LocalReplyConfigValidationError{\n\t\t\t\tfield: \"BodyFormat\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ LocalReplyConfigValidationError is the validation error returned by\n\/\/ LocalReplyConfig.Validate if the designated constraints aren't met.\ntype LocalReplyConfigValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e LocalReplyConfigValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e LocalReplyConfigValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e LocalReplyConfigValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e LocalReplyConfigValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e LocalReplyConfigValidationError) ErrorName() string { return \"LocalReplyConfigValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e LocalReplyConfigValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sLocalReplyConfig.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = LocalReplyConfigValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = LocalReplyConfigValidationError{}\n\n\/\/ Validate checks the field values on ResponseMapper with the rules defined in\n\/\/ the proto definition for this message. 
If any rules are violated, an error\n\/\/ is returned.\nfunc (m *ResponseMapper) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif m.GetFilter() == nil {\n\t\treturn ResponseMapperValidationError{\n\t\t\tfield: \"Filter\",\n\t\t\treason: \"value is required\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetFilter()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ResponseMapperValidationError{\n\t\t\t\tfield: \"Filter\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif wrapper := m.GetStatusCode(); wrapper != nil {\n\n\t\tif val := wrapper.GetValue(); val < 200 || val >= 600 {\n\t\t\treturn ResponseMapperValidationError{\n\t\t\t\tfield: \"StatusCode\",\n\t\t\t\treason: \"value must be inside range [200, 600)\",\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetBody()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ResponseMapperValidationError{\n\t\t\t\tfield: \"Body\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetBodyFormatOverride()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ResponseMapperValidationError{\n\t\t\t\tfield: \"BodyFormatOverride\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(m.GetHeadersToAdd()) > 1000 {\n\t\treturn ResponseMapperValidationError{\n\t\t\tfield: \"HeadersToAdd\",\n\t\t\treason: \"value must contain no more than 1000 item(s)\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetHeadersToAdd() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ResponseMapperValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"HeadersToAdd[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ResponseMapperValidationError is the validation error returned by\n\/\/ ResponseMapper.Validate if the designated constraints aren't met.\ntype ResponseMapperValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ResponseMapperValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ResponseMapperValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ResponseMapperValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ResponseMapperValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ResponseMapperValidationError) ErrorName() string { return \"ResponseMapperValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ResponseMapperValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sResponseMapper.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ResponseMapperValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = 
ResponseMapperValidationError{}\n\n\/\/ Validate checks the field values on Rds with the rules defined in the proto\n\/\/ definition for this message. If any rules are violated, an error is returned.\nfunc (m *Rds) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif m.GetConfigSource() == nil {\n\t\treturn RdsValidationError{\n\t\t\tfield: \"ConfigSource\",\n\t\t\treason: \"value is required\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetConfigSource()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn RdsValidationError{\n\t\t\t\tfield: \"ConfigSource\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for RouteConfigName\n\n\treturn nil\n}\n\n\/\/ RdsValidationError is the validation error returned by Rds.Validate if the\n\/\/ designated constraints aren't met.\ntype RdsValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e RdsValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e RdsValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e RdsValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e RdsValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e RdsValidationError) ErrorName() string { return \"RdsValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e RdsValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sRds.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = RdsValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = RdsValidationError{}\n\n\/\/ Validate checks the field values on ScopedRouteConfigurationsList with the\n\/\/ rules defined in the proto definition for this message. 
If any rules are\n\/\/ violated, an error is returned.\nfunc (m *ScopedRouteConfigurationsList) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif len(m.GetScopedRouteConfigurations()) < 1 {\n\t\treturn ScopedRouteConfigurationsListValidationError{\n\t\t\tfield: \"ScopedRouteConfigurations\",\n\t\t\treason: \"value must contain at least 1 item(s)\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetScopedRouteConfigurations() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRouteConfigurationsListValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"ScopedRouteConfigurations[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRouteConfigurationsListValidationError is the validation error\n\/\/ returned by ScopedRouteConfigurationsList.Validate if the designated\n\/\/ constraints aren't met.\ntype ScopedRouteConfigurationsListValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRouteConfigurationsListValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRouteConfigurationsListValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRouteConfigurationsListValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ScopedRouteConfigurationsListValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRouteConfigurationsListValidationError) ErrorName() string {\n\treturn \"ScopedRouteConfigurationsListValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRouteConfigurationsListValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRouteConfigurationsList.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRouteConfigurationsListValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRouteConfigurationsListValidationError{}\n\n\/\/ Validate checks the field values on ScopedRoutes with the rules defined in\n\/\/ the proto definition for this message. 
If any rules are violated, an error\n\/\/ is returned.\nfunc (m *ScopedRoutes) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn ScopedRoutesValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif m.GetScopeKeyBuilder() == nil {\n\t\treturn ScopedRoutesValidationError{\n\t\t\tfield: \"ScopeKeyBuilder\",\n\t\t\treason: \"value is required\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetScopeKeyBuilder()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ScopedRoutesValidationError{\n\t\t\t\tfield: \"ScopeKeyBuilder\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif m.GetRdsConfigSource() == nil {\n\t\treturn ScopedRoutesValidationError{\n\t\t\tfield: \"RdsConfigSource\",\n\t\t\treason: \"value is required\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetRdsConfigSource()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ScopedRoutesValidationError{\n\t\t\t\tfield: \"RdsConfigSource\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tswitch m.ConfigSpecifier.(type) {\n\n\tcase *ScopedRoutes_ScopedRouteConfigurationsList:\n\n\t\tif v, ok := interface{}(m.GetScopedRouteConfigurationsList()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRoutesValidationError{\n\t\t\t\t\tfield: \"ScopedRouteConfigurationsList\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *ScopedRoutes_ScopedRds:\n\n\t\tif v, ok := interface{}(m.GetScopedRds()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRoutesValidationError{\n\t\t\t\t\tfield: \"ScopedRds\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tdefault:\n\t\treturn ScopedRoutesValidationError{\n\t\t\tfield: \"ConfigSpecifier\",\n\t\t\treason: \"value is required\",\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRoutesValidationError is the validation error returned by\n\/\/ ScopedRoutes.Validate if the designated constraints aren't met.\ntype ScopedRoutesValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRoutesValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRoutesValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRoutesValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ScopedRoutesValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRoutesValidationError) ErrorName() string { return \"ScopedRoutesValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRoutesValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRoutes.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRoutesValidationError{}\n\nvar _ interface {\n\tField() 
string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRoutesValidationError{}\n\n\/\/ Validate checks the field values on ScopedRds with the rules defined in the\n\/\/ proto definition for this message. If any rules are violated, an error is returned.\nfunc (m *ScopedRds) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif m.GetScopedRdsConfigSource() == nil {\n\t\treturn ScopedRdsValidationError{\n\t\t\tfield: \"ScopedRdsConfigSource\",\n\t\t\treason: \"value is required\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetScopedRdsConfigSource()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn ScopedRdsValidationError{\n\t\t\t\tfield: \"ScopedRdsConfigSource\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for SrdsResourcesLocator\n\n\treturn nil\n}\n\n\/\/ ScopedRdsValidationError is the validation error returned by\n\/\/ ScopedRds.Validate if the designated constraints aren't met.\ntype ScopedRdsValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRdsValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRdsValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRdsValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ScopedRdsValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRdsValidationError) ErrorName() string { return \"ScopedRdsValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRdsValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRds.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRdsValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRdsValidationError{}\n\n\/\/ Validate checks the field values on HttpFilter with the rules defined in the\n\/\/ proto definition for this message. 
If any rules are violated, an error is returned.\nfunc (m *HttpFilter) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn HttpFilterValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t\/\/ no validation rules for IsOptional\n\n\tswitch m.ConfigType.(type) {\n\n\tcase *HttpFilter_TypedConfig:\n\n\t\tif v, ok := interface{}(m.GetTypedConfig()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpFilterValidationError{\n\t\t\t\t\tfield: \"TypedConfig\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *HttpFilter_ConfigDiscovery:\n\n\t\tif v, ok := interface{}(m.GetConfigDiscovery()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpFilterValidationError{\n\t\t\t\t\tfield: \"ConfigDiscovery\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *HttpFilter_HiddenEnvoyDeprecatedConfig:\n\n\t\tif v, ok := interface{}(m.GetHiddenEnvoyDeprecatedConfig()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpFilterValidationError{\n\t\t\t\t\tfield: \"HiddenEnvoyDeprecatedConfig\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ HttpFilterValidationError is the validation error returned by\n\/\/ HttpFilter.Validate if the designated constraints aren't met.\ntype HttpFilterValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpFilterValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpFilterValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e HttpFilterValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpFilterValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpFilterValidationError) ErrorName() string { return \"HttpFilterValidationError\" }\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpFilterValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpFilter.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpFilterValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpFilterValidationError{}\n\n\/\/ Validate checks the field values on RequestIDExtension with the rules\n\/\/ defined in the proto definition for this message. 
If any rules are\n\/\/ violated, an error is returned.\nfunc (m *RequestIDExtension) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetTypedConfig()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn RequestIDExtensionValidationError{\n\t\t\t\tfield: \"TypedConfig\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ RequestIDExtensionValidationError is the validation error returned by\n\/\/ RequestIDExtension.Validate if the designated constraints aren't met.\ntype RequestIDExtensionValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e RequestIDExtensionValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e RequestIDExtensionValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e RequestIDExtensionValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e RequestIDExtensionValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e RequestIDExtensionValidationError) ErrorName() string {\n\treturn \"RequestIDExtensionValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e RequestIDExtensionValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sRequestIDExtension.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = RequestIDExtensionValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = RequestIDExtensionValidationError{}\n\n\/\/ Validate checks the field values on EnvoyMobileHttpConnectionManager with\n\/\/ the rules defined in the proto definition for this message. 
If any rules\n\/\/ are violated, an error is returned.\nfunc (m *EnvoyMobileHttpConnectionManager) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetConfig()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn EnvoyMobileHttpConnectionManagerValidationError{\n\t\t\t\tfield: \"Config\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ EnvoyMobileHttpConnectionManagerValidationError is the validation error\n\/\/ returned by EnvoyMobileHttpConnectionManager.Validate if the designated\n\/\/ constraints aren't met.\ntype EnvoyMobileHttpConnectionManagerValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) ErrorName() string {\n\treturn \"EnvoyMobileHttpConnectionManagerValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e EnvoyMobileHttpConnectionManagerValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sEnvoyMobileHttpConnectionManager.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = EnvoyMobileHttpConnectionManagerValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = EnvoyMobileHttpConnectionManagerValidationError{}\n\n\/\/ Validate checks the field values on HttpConnectionManager_Tracing with the\n\/\/ rules defined in the proto definition for this message. 
If any rules are\n\/\/ violated, an error is returned.\nfunc (m *HttpConnectionManager_Tracing) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetClientSampling()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\tfield: \"ClientSampling\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetRandomSampling()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\tfield: \"RandomSampling\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetOverallSampling()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\tfield: \"OverallSampling\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for Verbose\n\n\tif v, ok := interface{}(m.GetMaxPathTagLength()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\tfield: \"MaxPathTagLength\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetCustomTags() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"CustomTags[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetProvider()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\t\tfield: \"Provider\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif _, ok := HttpConnectionManager_Tracing_OperationName_name[int32(m.GetHiddenEnvoyDeprecatedOperationName())]; !ok {\n\t\treturn HttpConnectionManager_TracingValidationError{\n\t\t\tfield: \"HiddenEnvoyDeprecatedOperationName\",\n\t\t\treason: \"value must be one of the defined enum values\",\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManager_TracingValidationError is the validation error\n\/\/ returned by HttpConnectionManager_Tracing.Validate if the designated\n\/\/ constraints aren't met.\ntype HttpConnectionManager_TracingValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManager_TracingValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManager_TracingValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManager_TracingValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManager_TracingValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManager_TracingValidationError) ErrorName() string {\n\treturn 
\"HttpConnectionManager_TracingValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManager_TracingValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager_Tracing.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManager_TracingValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManager_TracingValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ HttpConnectionManager_InternalAddressConfig with the rules defined in the\n\/\/ proto definition for this message. If any rules are violated, an error is returned.\nfunc (m *HttpConnectionManager_InternalAddressConfig) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t\/\/ no validation rules for UnixSockets\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManager_InternalAddressConfigValidationError is the validation\n\/\/ error returned by HttpConnectionManager_InternalAddressConfig.Validate if\n\/\/ the designated constraints aren't met.\ntype HttpConnectionManager_InternalAddressConfigValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) ErrorName() string {\n\treturn \"HttpConnectionManager_InternalAddressConfigValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManager_InternalAddressConfigValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager_InternalAddressConfig.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManager_InternalAddressConfigValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManager_InternalAddressConfigValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ HttpConnectionManager_SetCurrentClientCertDetails with the rules defined in\n\/\/ the proto definition for this message. 
If any rules are violated, an error\n\/\/ is returned.\nfunc (m *HttpConnectionManager_SetCurrentClientCertDetails) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetSubject()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_SetCurrentClientCertDetailsValidationError{\n\t\t\t\tfield: \"Subject\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ no validation rules for Cert\n\n\t\/\/ no validation rules for Chain\n\n\t\/\/ no validation rules for Dns\n\n\t\/\/ no validation rules for Uri\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManager_SetCurrentClientCertDetailsValidationError is the\n\/\/ validation error returned by\n\/\/ HttpConnectionManager_SetCurrentClientCertDetails.Validate if the\n\/\/ designated constraints aren't met.\ntype HttpConnectionManager_SetCurrentClientCertDetailsValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) Field() string {\n\treturn e.field\n}\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) Reason() string {\n\treturn e.reason\n}\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) Cause() error {\n\treturn e.cause\n}\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) ErrorName() string {\n\treturn \"HttpConnectionManager_SetCurrentClientCertDetailsValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManager_SetCurrentClientCertDetailsValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager_SetCurrentClientCertDetails.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManager_SetCurrentClientCertDetailsValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManager_SetCurrentClientCertDetailsValidationError{}\n\n\/\/ Validate checks the field values on HttpConnectionManager_UpgradeConfig with\n\/\/ the rules defined in the proto definition for this message. 
If any rules\n\/\/ are violated, an error is returned.\nfunc (m *HttpConnectionManager_UpgradeConfig) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t\/\/ no validation rules for UpgradeType\n\n\tfor idx, item := range m.GetFilters() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn HttpConnectionManager_UpgradeConfigValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Filters[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetEnabled()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_UpgradeConfigValidationError{\n\t\t\t\tfield: \"Enabled\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManager_UpgradeConfigValidationError is the validation error\n\/\/ returned by HttpConnectionManager_UpgradeConfig.Validate if the designated\n\/\/ constraints aren't met.\ntype HttpConnectionManager_UpgradeConfigValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) ErrorName() string {\n\treturn \"HttpConnectionManager_UpgradeConfigValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManager_UpgradeConfigValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager_UpgradeConfig.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManager_UpgradeConfigValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManager_UpgradeConfigValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ HttpConnectionManager_PathNormalizationOptions with the rules defined in\n\/\/ the proto definition for this message. 
If any rules are violated, an error\n\/\/ is returned.\nfunc (m *HttpConnectionManager_PathNormalizationOptions) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetForwardingTransformation()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_PathNormalizationOptionsValidationError{\n\t\t\t\tfield: \"ForwardingTransformation\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetHttpFilterTransformation()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn HttpConnectionManager_PathNormalizationOptionsValidationError{\n\t\t\t\tfield: \"HttpFilterTransformation\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ HttpConnectionManager_PathNormalizationOptionsValidationError is the\n\/\/ validation error returned by\n\/\/ HttpConnectionManager_PathNormalizationOptions.Validate if the designated\n\/\/ constraints aren't met.\ntype HttpConnectionManager_PathNormalizationOptionsValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) Reason() string {\n\treturn e.reason\n}\n\n\/\/ Cause function returns cause value.\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) ErrorName() string {\n\treturn \"HttpConnectionManager_PathNormalizationOptionsValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e HttpConnectionManager_PathNormalizationOptionsValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sHttpConnectionManager_PathNormalizationOptions.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = HttpConnectionManager_PathNormalizationOptionsValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = HttpConnectionManager_PathNormalizationOptionsValidationError{}\n\n\/\/ Validate checks the field values on ScopedRoutes_ScopeKeyBuilder with the\n\/\/ rules defined in the proto definition for this message. 
If any rules are\n\/\/ violated, an error is returned.\nfunc (m *ScopedRoutes_ScopeKeyBuilder) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif len(m.GetFragments()) < 1 {\n\t\treturn ScopedRoutes_ScopeKeyBuilderValidationError{\n\t\t\tfield: \"Fragments\",\n\t\t\treason: \"value must contain at least 1 item(s)\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetFragments() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRoutes_ScopeKeyBuilderValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Fragments[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRoutes_ScopeKeyBuilderValidationError is the validation error returned\n\/\/ by ScopedRoutes_ScopeKeyBuilder.Validate if the designated constraints\n\/\/ aren't met.\ntype ScopedRoutes_ScopeKeyBuilderValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) ErrorName() string {\n\treturn \"ScopedRoutes_ScopeKeyBuilderValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRoutes_ScopeKeyBuilderValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRoutes_ScopeKeyBuilder.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRoutes_ScopeKeyBuilderValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRoutes_ScopeKeyBuilderValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder with the rules defined in the\n\/\/ proto definition for this message. 
If any rules are violated, an error is returned.\nfunc (m *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tswitch m.Type.(type) {\n\n\tcase *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_:\n\n\t\tif v, ok := interface{}(m.GetHeaderValueExtractor()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError{\n\t\t\t\t\tfield: \"HeaderValueExtractor\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tdefault:\n\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError{\n\t\t\tfield: \"Type\",\n\t\t\treason: \"value is required\",\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError is the\n\/\/ validation error returned by\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder.Validate if the designated\n\/\/ constraints aren't met.\ntype ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) Field() string { return e.field }\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) Reason() string { return e.reason }\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) Cause() error { return e.cause }\n\n\/\/ Key function returns key value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) Key() bool { return e.key }\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) ErrorName() string {\n\treturn \"ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRoutes_ScopeKeyBuilder_FragmentBuilder.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRoutes_ScopeKeyBuilder_FragmentBuilderValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor with the\n\/\/ rules defined in the proto definition for this message. 
If any rules are\n\/\/ violated, an error is returned.\nfunc (m *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t\/\/ no validation rules for ElementSeparator\n\n\tswitch m.ExtractType.(type) {\n\n\tcase *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_Index:\n\t\t\/\/ no validation rules for Index\n\n\tcase *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_Element:\n\n\t\tif v, ok := interface{}(m.GetElement()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError{\n\t\t\t\t\tfield: \"Element\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError\n\/\/ is the validation error returned by\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor.Validate\n\/\/ if the designated constraints aren't met.\ntype ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) Field() string {\n\treturn e.field\n}\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) Reason() string {\n\treturn e.reason\n}\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) Cause() error {\n\treturn e.cause\n}\n\n\/\/ Key function returns key value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) Key() bool {\n\treturn e.key\n}\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) ErrorName() string {\n\treturn \"ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractorValidationError{}\n\n\/\/ Validate checks the field values on\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElement\n\/\/ with the rules defined in the proto definition for this message. 
If any\n\/\/ rules are violated, an error is returned.\nfunc (m *ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElement) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetSeparator()) < 1 {\n\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError{\n\t\t\tfield: \"Separator\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetKey()) < 1 {\n\t\treturn ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError{\n\t\t\tfield: \"Key\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError\n\/\/ is the validation error returned by\n\/\/ ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElement.Validate\n\/\/ if the designated constraints aren't met.\ntype ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError struct {\n\tfield string\n\treason string\n\tcause error\n\tkey bool\n}\n\n\/\/ Field function returns field value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) Field() string {\n\treturn e.field\n}\n\n\/\/ Reason function returns reason value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) Reason() string {\n\treturn e.reason\n}\n\n\/\/ Cause function returns cause value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) Cause() error {\n\treturn e.cause\n}\n\n\/\/ Key function returns key value.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) Key() bool {\n\treturn e.key\n}\n\n\/\/ ErrorName returns error name.\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) ErrorName() string {\n\treturn \"ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError\"\n}\n\n\/\/ Error satisfies the builtin error interface\nfunc (e ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError) Error() string {\n\tcause := \"\"\n\tif e.cause != nil {\n\t\tcause = fmt.Sprintf(\" | caused by: %v\", e.cause)\n\t}\n\n\tkey := \"\"\n\tif e.key {\n\t\tkey = \"key for \"\n\t}\n\n\treturn fmt.Sprintf(\n\t\t\"invalid %sScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElement.%s: %s%s\",\n\t\tkey,\n\t\te.field,\n\t\te.reason,\n\t\tcause)\n}\n\nvar _ error = ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError{}\n\nvar _ interface {\n\tField() string\n\tReason() string\n\tKey() bool\n\tCause() error\n\tErrorName() string\n} = ScopedRoutes_ScopeKeyBuilder_FragmentBuilder_HeaderValueExtractor_KvElementValidationError{}\n","avg_line_length":26.9404553415,"max_line_length":120,"alphanum_fraction":0.7235909771} +{"size":4510,"ext":"go","lang":"Go","max_stars_count":43.0,"content":"\/\/ +build enterprise\n\npackage kong\n\nimport (\n\t\"path\"\n\t\"testing\"\n\n\t\"github.com\/stretchr\/testify\/assert\"\n)\n\nfunc TestAdminService(T *testing.T) {\n\trunWhenEnterprise(T, \">=0.33.0\", requiredFeatures{})\n\tassert := assert.New(T)\n\n\tclient, err := NewTestClient(nil, nil)\n\tassert.Nil(err)\n\tassert.NotNil(client)\n\n\tadmin := &Admin{\n\t\tEmail: 
String(\"admin@test.com\"),\n\t\tUsername: String(\"newAdmin\"),\n\t\tCustomID: String(\"admin123\"),\n\t\tRBACTokenEnabled: Bool(true),\n\t}\n\n\tcreatedAdmin, err := client.Admins.Create(defaultCtx, admin)\n\tassert.Nil(err)\n\tassert.NotNil(createdAdmin)\n\n\tadmin, err = client.Admins.Get(defaultCtx, createdAdmin.ID)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\n\tadmin.CustomID = String(\"admin321\")\n\tadmin, err = client.Admins.Update(defaultCtx, admin)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\tassert.Equal(\"admin321\", *admin.CustomID)\n\n\terr = client.Admins.Delete(defaultCtx, createdAdmin.ID)\n\tassert.Nil(err)\n}\n\nfunc TestAdminServiceWorkspace(T *testing.T) {\n\trunWhenEnterprise(T, \">=0.33.0\", requiredFeatures{})\n\tassert := assert.New(T)\n\n\tclient, err := NewTestClient(nil, nil)\n\tassert.Nil(err)\n\tassert.NotNil(client)\n\n\tworkspace := Workspace{\n\t\tName: String(\"test-workspace\"),\n\t}\n\n\tcreatedWorkspace, err := client.Workspaces.Create(defaultCtx, &workspace)\n\tassert.Nil(err)\n\tassert.NotNil(createdWorkspace)\n\n\tworkspaceClient, err := NewTestClient(String(path.Join(defaultBaseURL, *createdWorkspace.Name)), nil)\n\tassert.Nil(err)\n\tassert.NotNil(workspaceClient)\n\n\tadmin := &Admin{\n\t\tEmail: String(\"admin@test.com\"),\n\t\tUsername: String(\"newAdmin\"),\n\t\tCustomID: String(\"admin123\"),\n\t\tRBACTokenEnabled: Bool(true),\n\t}\n\n\tcreatedAdmin, err := client.Admins.Create(defaultCtx, admin)\n\tassert.Nil(err)\n\tassert.NotNil(createdAdmin)\n\n\tadmin, err = client.Admins.Get(defaultCtx, createdAdmin.ID)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\n\tadmin.CustomID = String(\"admin321\")\n\tadmin, err = client.Admins.Update(defaultCtx, admin)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\tassert.Equal(\"admin321\", *admin.CustomID)\n\n\terr = client.Admins.Delete(defaultCtx, createdAdmin.ID)\n\tassert.Nil(err)\n\n\terr = client.Workspaces.Delete(defaultCtx, createdWorkspace.Name)\n\tassert.Nil(err)\n}\n\nfunc TestAdminServiceList(T *testing.T) {\n\tassert := assert.New(T)\n\tclient, err := NewTestClient(nil, nil)\n\trunWhenEnterprise(T, \">=0.33.0\", requiredFeatures{})\n\n\tassert.Nil(err)\n\tassert.NotNil(client)\n\n\tadmin1 := &Admin{\n\t\tEmail: String(\"admin1@test.com\"),\n\t\tUsername: String(\"newAdmin1\"),\n\t\tCustomID: String(\"admin1\"),\n\t\tRBACTokenEnabled: Bool(true),\n\t}\n\tadmin2 := &Admin{\n\t\tEmail: String(\"admin2@test.com\"),\n\t\tUsername: String(\"newAdmin2\"),\n\t\tCustomID: String(\"admin2\"),\n\t\tRBACTokenEnabled: Bool(true),\n\t}\n\n\tcreatedAdmin1, err := client.Admins.Create(defaultCtx, admin1)\n\tassert.Nil(err)\n\tassert.NotNil(createdAdmin1)\n\tcreatedAdmin2, err := client.Admins.Create(defaultCtx, admin2)\n\tassert.Nil(err)\n\tassert.NotNil(createdAdmin2)\n\n\tadmins, _, err := client.Admins.List(defaultCtx, nil)\n\tassert.Nil(err)\n\tassert.NotNil(admins)\n\n\t\/\/ Check if RBAC is enabled\n\tres, err := client.Root(defaultCtx)\n\tassert.Nil(err)\n\trbac := res[\"configuration\"].(map[string]interface{})[\"rbac\"].(string)\n\texpectedAdmins := 3\n\tif rbac == \"off\" {\n\t\texpectedAdmins = 2\n\t}\n\tassert.Equal(expectedAdmins, len(admins))\n\n\terr = client.Admins.Delete(defaultCtx, createdAdmin1.ID)\n\tassert.Nil(err)\n\terr = client.Admins.Delete(defaultCtx, createdAdmin2.ID)\n\tassert.Nil(err)\n\n}\n\n\/\/ XXX:\n\/\/ This test requires RBAC to be enabled.\nfunc TestAdminServiceRegisterCredentials(T *testing.T) {\n\trunWhenEnterprise(T, \">=0.33.0\", requiredFeatures{rbac: true})\n\tassert := 
assert.New(T)\n\n\tclient, err := NewTestClient(nil, nil)\n\tassert.Nil(err)\n\tassert.NotNil(client)\n\n\tadmin := &Admin{\n\t\tEmail: String(\"admin1@test.com\"),\n\t\tUsername: String(\"newAdmin1\"),\n\t\tCustomID: String(\"admin1\"),\n\t\tRBACTokenEnabled: Bool(true),\n\t}\n\n\tadmin, err = client.Admins.Invite(defaultCtx, admin)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\n\t\/\/ Generate a new registration URL for the Admin\n\tadmin, err = client.Admins.GenerateRegisterURL(defaultCtx, admin.ID)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\n\tadmin.Password = String(\"bar\")\n\n\terr = client.Admins.RegisterCredentials(defaultCtx, admin)\n\tassert.Nil(err)\n\n\tadmin, err = client.Admins.Get(defaultCtx, admin.ID)\n\tassert.Nil(err)\n\tassert.NotNil(admin)\n\n\terr = client.Admins.Delete(defaultCtx, admin.ID)\n\tassert.Nil(err)\n\n}\n","avg_line_length":24.9171270718,"max_line_length":102,"alphanum_fraction":0.7033259424} +{"size":1112,"ext":"go","lang":"Go","max_stars_count":6.0,"content":"\/\/ Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage local\n\nimport (\n\t\"context\"\n\n\t\"github.com\/gardener\/machine-controller-manager\/pkg\/util\/provider\/driver\"\n)\n\nfunc (d *localDriver) GetVolumeIDs(_ context.Context, _ *driver.GetVolumeIDsRequest) (*driver.GetVolumeIDsResponse, error) {\n\t\/\/ TODO: In the future, this could return the volumes for a local provisioner.\n\treturn &driver.GetVolumeIDsResponse{}, nil\n}\n","avg_line_length":41.1851851852,"max_line_length":186,"alphanum_fraction":0.7589928058} +{"size":4522,"ext":"go","lang":"Go","max_stars_count":null,"content":"package main\n\nimport (\n\t\"archive\/zip\"\n\t\"errors\"\n\t\"go\/ast\"\n\t\"go\/parser\"\n\t\"go\/token\"\n\t\"io\"\n\t\"net\/http\"\n\t\"os\"\n\t\"path\"\n\t\"path\/filepath\"\n\t\"regexp\"\n\t\"sort\"\n\t\"strings\"\n\n\t\"github.com\/DisgoOrg\/disgo\/api\"\n)\n\nconst disgoURL = \"https:\/\/api.github.com\/repos\/DisgoOrg\/disgo\/releases\/latest\"\n\nvar disgoGitInfo *GitInfo\n\nvar Packages []*ast.Package\n\ntype SourceInfo struct {\n\tPackageName string\n\tPackagePath string\n\tIdentifierName string\n\tComment string\n\tKind ast.ObjKind\n\tParams *string\n\tResults *string\n}\n\nvar filenameRegex = regexp.MustCompile(`\/[^\/.]+\\.go$`)\n\nfunc findInPackages(packageName string, identifierName string) *SourceInfo {\n\tfor _, pck := range Packages {\n\t\tif pck.Name == packageName {\n\t\t\tfor filePath, file := range pck.Files {\n\t\t\t\tvar lastPos token.Pos = 0\n\t\t\t\t\/\/ make slice from file.Scope.Objects & sort after Pos\n\t\t\t\tobjects := make([]*ast.Object, len(file.Scope.Objects))\n\t\t\t\ti := 0\n\t\t\t\tfor _, obj := range file.Scope.Objects {\n\t\t\t\t\tobjects[i] = obj\n\t\t\t\t\ti++\n\t\t\t\t}\n\t\t\t\tsort.SliceStable(objects, func(i, j int) bool 
{\n\t\t\t\t\treturn objects[i].Pos() < objects[j].Pos()\n\t\t\t\t})\n\n\t\t\t\tfor _, obj := range objects {\n\t\t\t\t\tif obj.Name == identifierName {\n\t\t\t\t\t\tcomment := \"\"\n\t\t\t\t\t\tfor _, cmt := range file.Comments {\n\t\t\t\t\t\t\tif cmt.Pos() > lastPos && cmt.Pos() < obj.Pos() {\n\t\t\t\t\t\t\t\tcomment += \"\\n\" + cmt.Text()\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tpackagePath := strings.ReplaceAll(filePath, \"\\\\\", \"\/\")\n\t\t\t\t\t\tpackagePath = strings.Replace(packagePath, \"..\/\", \"\", 1)\n\t\t\t\t\t\tpackagePath = filenameRegex.ReplaceAllString(packagePath, \"\")\n\n\t\t\t\t\t\tsourceInfo := &SourceInfo{\n\t\t\t\t\t\t\tPackageName: pck.Name,\n\t\t\t\t\t\t\tPackagePath: packagePath,\n\t\t\t\t\t\t\tIdentifierName: obj.Name,\n\t\t\t\t\t\t\tComment: comment,\n\t\t\t\t\t\t\tKind: obj.Kind,\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif obj.Kind == ast.Fun {\n\t\t\t\t\t\t\tparseFunc(obj, sourceInfo)\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn sourceInfo\n\t\t\t\t\t}\n\t\t\t\t\tlastPos = obj.Pos()\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc parseFunc(obj *ast.Object, sourceInfo *SourceInfo) {\n\tfDecl := obj.Decl.(*ast.FuncDecl)\n\n\tparams := \"\"\n\tfor i, field := range fDecl.Type.Params.List {\n\t\tif i != 0 {\n\t\t\tparams += \", \"\n\t\t}\n\t\tif len(field.Names) > 0 {\n\t\t\tparams += field.Names[0].Name + \" \"\n\t\t}\n\t\tswitch x := field.Type.(type) {\n\t\tcase *ast.SelectorExpr:\n\t\t\tparams += x.X.(*ast.Ident).Name + \".\" + x.Sel.Name\n\t\tcase *ast.Ident:\n\t\t\tparams += x.Name\n\t\t}\n\t}\n\tif params != \"\" {\n\t\tsourceInfo.Params = ¶ms\n\t}\n\n\tresults := \"\"\n\tfor i, field := range fDecl.Type.Results.List {\n\t\tif i != 0 {\n\t\t\tresults += \", \"\n\t\t}\n\t\tif len(field.Names) > 0 {\n\t\t\tresults += field.Names[0].Name + \" \"\n\t\t}\n\t\tswitch x := field.Type.(type) {\n\t\tcase *ast.SelectorExpr:\n\t\t\tresults += x.X.(*ast.Ident).Name + \".\" + x.Sel.Name\n\t\tcase *ast.Ident:\n\t\t\tresults += x.Name\n\t\t}\n\n\t}\n\tif results != \"\" {\n\t\tsourceInfo.Results = &results\n\t}\n}\n\nfunc loadPackages() error {\n\tfs := token.NewFileSet()\n\terr := filepath.Walk(\"disgo\", func(path string, info os.FileInfo, err error) error {\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif info.IsDir() {\n\t\t\tpackages, err := parser.ParseDir(fs, path, nil, parser.ParseComments|parser.AllErrors)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, pck := range packages {\n\t\t\t\tPackages = append(Packages, pck)\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\terr = os.Remove(\"disgo.zip\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = os.RemoveAll(\"disgo\")\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn err\n}\n\nfunc downloadDisgo(restClient api.RestClient) error {\n\tgitInfo, err := getGitInfo(restClient)\n\tdisgoGitInfo = gitInfo\n\n\trs, err := http.Get(disgoGitInfo.ZipballURL)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer func() {\n\t\t_ = rs.Body.Close()\n\t}()\n\n\tif rs.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"no status code 200\")\n\t}\n\n\tout, err := os.Create(\"disgo.zip\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\t_ = out.Close()\n\t}()\n\n\t_, err = io.Copy(out, rs.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn unzip(\".\/disgo.zip\")\n}\n\n\/\/ https:\/\/stackoverflow.com\/a\/65618964\nfunc unzip(source string) error {\n\treader, err := zip.OpenReader(source)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdirName := reader.File[0].Name\n\tfor _, file := range reader.File 
{\n\t\tif file.Mode().IsDir() {\n\t\t\tcontinue\n\t\t}\n\t\tfileName := strings.Replace(file.Name, dirName, \"disgo\/\", 1)\n\t\terr = os.MkdirAll(path.Dir(fileName), os.ModeDir)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\topen, err := file.Open()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tcreate, err := os.Create(fileName)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t_, err = create.ReadFrom(open)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}\n","avg_line_length":20.2780269058,"max_line_length":89,"alphanum_fraction":0.5977443609} +{"size":637,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package matchers\n\nimport (\n\t\"reflect\"\n\n\t\"github.com\/petergtz\/pegomock\"\n\tmodels \"github.com\/runatlantis\/atlantis\/server\/events\/models\"\n)\n\nfunc AnySliceOfModelsProjectCommandContext() []models.ProjectCommandContext {\n\tpegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]models.ProjectCommandContext))(nil)).Elem()))\n\tvar nullValue []models.ProjectCommandContext\n\treturn nullValue\n}\n\nfunc EqSliceOfModelsProjectCommandContext(value []models.ProjectCommandContext) []models.ProjectCommandContext {\n\tpegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})\n\tvar nullValue []models.ProjectCommandContext\n\treturn nullValue\n}\n","avg_line_length":30.3333333333,"max_line_length":114,"alphanum_fraction":0.8178963893} +{"size":2818,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package actions\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\n\t\"github.com\/sirupsen\/logrus\"\n\tv1 \"k8s.io\/api\/core\/v1\"\n\tapierrors \"k8s.io\/apimachinery\/pkg\/api\/errors\"\n\tmetav1 \"k8s.io\/apimachinery\/pkg\/apis\/meta\/v1\"\n\t\"k8s.io\/client-go\/kubernetes\"\n\n\t\"github.com\/castai\/cluster-controller\/castai\"\n)\n\nfunc newPatchNodeHandler(log logrus.FieldLogger, clientset kubernetes.Interface) ActionHandler {\n\treturn &patchNodeHandler{\n\t\tlog: log,\n\t\tclientset: clientset,\n\t}\n}\n\ntype patchNodeHandler struct {\n\tlog logrus.FieldLogger\n\tclientset kubernetes.Interface\n}\n\nfunc (h *patchNodeHandler) Handle(ctx context.Context, data interface{}) error {\n\treq, ok := data.(*castai.ActionPatchNode)\n\tif !ok {\n\t\treturn fmt.Errorf(\"unexpected type %T for delete patch handler\", data)\n\t}\n\tfor k := range req.Labels {\n\t\tif k == \"\" {\n\t\t\treturn errors.New(\"labels contain entry with empty key\")\n\t\t}\n\t}\n\tfor k := range req.Annotations {\n\t\tif k == \"\" {\n\t\t\treturn errors.New(\"annotations contain entry with empty key\")\n\t\t}\n\t}\n\tfor _, t := range req.Taints {\n\t\tif t.Key == \"\" {\n\t\t\treturn errors.New(\"taint contain entry with empty key\")\n\t\t}\n\t}\n\n\tlog := h.log.WithField(\"node_name\", req.NodeName)\n\n\tnode, err := h.clientset.CoreV1().Nodes().Get(ctx, req.NodeName, metav1.GetOptions{})\n\tif err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\tlog.Info(\"node not found, skipping patch\")\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\n\tlog.Infof(\"patching node, labels=%v, taints=%v, annotations=%v\", req.Labels, req.Taints, req.Annotations)\n\n\treturn patchNode(ctx, h.clientset, node, func(n *v1.Node) error {\n\t\tnode.Labels = patchNodeMapField(node.Labels, req.Labels)\n\t\tnode.Annotations = patchNodeMapField(node.Annotations, req.Annotations)\n\t\tnode.Spec.Taints = patchTaints(node.Spec.Taints, req.Taints)\n\t\treturn nil\n\t})\n}\n\nfunc patchNodeMapField(values map[string]string, patch map[string]string) map[string]string 
{\n\tif values == nil {\n\t\tvalues = map[string]string{}\n\t}\n\n\tfor k, v := range patch {\n\t\tif k[0] == '-' {\n\t\t\tdelete(values, k[1:])\n\t\t} else {\n\t\t\tvalues[k] = v\n\t\t}\n\t}\n\treturn values\n}\n\nfunc patchTaints(taints []v1.Taint, patch []castai.NodeTaint) []v1.Taint {\n\tfor _, v := range patch {\n\t\ttaint := &v1.Taint{Key: v.Key, Value: v.Value, Effect: v1.TaintEffect(v.Effect)}\n\t\tif v.Key[0] == '-' {\n\t\t\ttaint.Key = taint.Key[1:]\n\t\t\ttaints = deleteTaint(taints, taint)\n\t\t} else if _, found := findTaint(taints, taint); !found {\n\t\t\ttaints = append(taints, *taint)\n\t\t}\n\t}\n\treturn taints\n}\n\nfunc findTaint(taints []v1.Taint, t *v1.Taint) (v1.Taint, bool) {\n\tfor _, taint := range taints {\n\t\tif taint.MatchTaint(t) {\n\t\t\treturn taint, true\n\t\t}\n\t}\n\treturn v1.Taint{}, false\n}\n\nfunc deleteTaint(taints []v1.Taint, t *v1.Taint) []v1.Taint {\n\tvar res []v1.Taint\n\tfor _, taint := range taints {\n\t\tif !taint.MatchTaint(t) {\n\t\t\tres = append(res, taint)\n\t\t}\n\t}\n\treturn res\n}\n","avg_line_length":24.0854700855,"max_line_length":106,"alphanum_fraction":0.6731724627} +{"size":5161,"ext":"go","lang":"Go","max_stars_count":8776.0,"content":"\/\/ Copyright 2020 Dolthub, Inc.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage commands\n\nimport (\n\t\"bufio\"\n\t\"io\"\n\t\"regexp\"\n\t\"unicode\"\n)\n\ntype statementScanner struct {\n\t*bufio.Scanner\n\tstatementStartLine int \/\/ the line number of the first line of the last parsed statement\n\tstartLineNum int \/\/ the line number we began parsing the most recent token at\n\tlineNum int \/\/ the current line number being parsed\n\tDelimiter string\n}\n\nconst maxStatementBufferBytes = 100 * 1024 * 1024\n\nfunc NewSqlStatementScanner(input io.Reader) *statementScanner {\n\tscanner := bufio.NewScanner(input)\n\tconst initialCapacity = 512 * 1024\n\tbuf := make([]byte, initialCapacity)\n\tscanner.Buffer(buf, maxStatementBufferBytes)\n\n\ts := &statementScanner{\n\t\tScanner: scanner,\n\t\tlineNum: 1,\n\t\tDelimiter: \";\",\n\t}\n\tscanner.Split(s.scanStatements)\n\n\treturn s\n}\n\nconst (\n\tsQuote byte = '\\''\n\tdQuote = '\"'\n\tbackslash = '\\\\'\n\tbacktick = '`'\n)\n\nvar scannerDelimiterRegex = regexp.MustCompile(`(?i)^\\s*DELIMITER\\s+(\\S+)[ \\t]*([\\n]+|\\S+\\s*)?`)\n\n\/\/ ScanStatements is a split function for a Scanner that returns each SQL statement in the input as a token.\nfunc (s *statementScanner) scanStatements(data []byte, atEOF bool) (advance int, token []byte, err error) {\n\tif atEOF && len(data) == 0 {\n\t\treturn 0, nil, nil\n\t}\n\n\tvar (\n\t\tquoteChar byte \/\/ the opening quote character of the current quote being parsed, or 0 if the current parse location isn't inside a quoted string\n\t\tlastChar byte \/\/ the last character parsed\n\t\tignoreNextChar bool \/\/ whether to ignore the next character\n\t\tnumConsecutiveBackslashes int \/\/ the number of consecutive backslashes 
encountered\n\t\tseenNonWhitespaceChar bool \/\/ whether we have encountered a non-whitespace character since we returned the last token\n\t\tnumConsecutiveDelimiterMatches int \/\/ the consecutive number of characters that have been matched to the delimiter\n\t)\n\n\ts.startLineNum = s.lineNum\n\n\tif idxs := scannerDelimiterRegex.FindIndex(data); len(idxs) == 2 {\n\t\treturn idxs[1], data[0:idxs[1]], nil\n\t}\n\n\tfor i := 0; i < len(data); i++ {\n\t\tif !ignoreNextChar {\n\t\t\t\/\/ this doesn't handle unicode characters correctly and will break on some things, but it's only used for line\n\t\t\t\/\/ number reporting.\n\t\t\tif !seenNonWhitespaceChar && !unicode.IsSpace(rune(data[i])) {\n\t\t\t\tseenNonWhitespaceChar = true\n\t\t\t\ts.statementStartLine = s.lineNum\n\t\t\t}\n\t\t\t\/\/ check if we've matched the delimiter string\n\t\t\tif quoteChar == 0 && data[i] == s.Delimiter[numConsecutiveDelimiterMatches] {\n\t\t\t\tnumConsecutiveDelimiterMatches++\n\t\t\t\tif numConsecutiveDelimiterMatches == len(s.Delimiter) {\n\t\t\t\t\ts.startLineNum = s.lineNum\n\t\t\t\t\t_, _, _ = s.resetState()\n\t\t\t\t\tremovalLength := len(s.Delimiter) - 1 \/\/ We remove the delimiter so it depends on the length\n\t\t\t\t\treturn i + 1, data[0 : i-removalLength], nil\n\t\t\t\t}\n\t\t\t\tlastChar = data[i]\n\t\t\t\tcontinue\n\t\t\t} else {\n\t\t\t\tnumConsecutiveDelimiterMatches = 0\n\t\t\t}\n\n\t\t\tswitch data[i] {\n\t\t\tcase '\\n':\n\t\t\t\ts.lineNum++\n\t\t\tcase backslash:\n\t\t\t\tnumConsecutiveBackslashes++\n\t\t\tcase sQuote, dQuote, backtick:\n\t\t\t\tprevNumConsecutiveBackslashes := numConsecutiveBackslashes\n\t\t\t\tnumConsecutiveBackslashes = 0\n\n\t\t\t\t\/\/ escaped quote character\n\t\t\t\tif lastChar == backslash && prevNumConsecutiveBackslashes%2 == 1 {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t\/\/ currently in a quoted string\n\t\t\t\tif quoteChar != 0 {\n\n\t\t\t\t\t\/\/ end quote or two consecutive quote characters (a form of escaping quote chars)\n\t\t\t\t\tif quoteChar == data[i] {\n\t\t\t\t\t\tvar nextChar byte = 0\n\t\t\t\t\t\tif i+1 < len(data) {\n\t\t\t\t\t\t\tnextChar = data[i+1]\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif nextChar == quoteChar {\n\t\t\t\t\t\t\t\/\/ escaped quote. skip the next character\n\t\t\t\t\t\t\tignoreNextChar = true\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t} else if atEOF || i+1 < len(data) {\n\t\t\t\t\t\t\t\/\/ end quote\n\t\t\t\t\t\t\tquoteChar = 0\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\/\/ need more data to make a decision\n\t\t\t\t\t\t\treturn s.resetState()\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t\/\/ embedded quote ('\"' or \"'\")\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t\/\/ open quote\n\t\t\t\tquoteChar = data[i]\n\t\t\tdefault:\n\t\t\t\tnumConsecutiveBackslashes = 0\n\t\t\t}\n\t\t} else {\n\t\t\tignoreNextChar = false\n\t\t}\n\n\t\tlastChar = data[i]\n\t}\n\n\t\/\/ If we're at EOF, we have a final, non-terminated line. 
Return it.\n\tif atEOF {\n\t\treturn len(data), data, nil\n\t}\n\n\t\/\/ Request more data.\n\treturn s.resetState()\n}\n\n\/\/ resetState resets the internal state of the scanner and returns the \"more data\" response for a split function\nfunc (s *statementScanner) resetState() (advance int, token []byte, err error) {\n\t\/\/ rewind the line number to where we started parsing this token\n\ts.lineNum = s.startLineNum\n\treturn 0, nil, nil\n}\n","avg_line_length":30.0058139535,"max_line_length":167,"alphanum_fraction":0.6704127107} +{"size":28717,"ext":"go","lang":"Go","max_stars_count":11364.0,"content":"package logstash\n\nimport (\n\t\"fmt\"\n\t\"net\"\n\t\"net\/http\"\n\t\"net\/http\/httptest\"\n\t\"net\/url\"\n\t\"testing\"\n\n\t\"github.com\/influxdata\/telegraf\/testutil\"\n\t\"github.com\/stretchr\/testify\/require\"\n)\n\nvar logstashTest = NewLogstash()\n\nvar (\n\tlogstash5accPipelineStats testutil.Accumulator\n\tlogstash6accPipelinesStats testutil.Accumulator\n\tlogstash7accPipelinesStats testutil.Accumulator\n\tlogstash5accProcessStats testutil.Accumulator\n\tlogstash6accProcessStats testutil.Accumulator\n\tlogstash5accJVMStats testutil.Accumulator\n\tlogstash6accJVMStats testutil.Accumulator\n)\n\nfunc Test_Logstash5GatherProcessStats(test *testing.T) {\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash5ProcessJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherProcessStats(logstashTest.URL+processStats, &logstash5accProcessStats)\n\trequire.NoError(test, err, \"Can't gather Process stats\")\n\n\tlogstash5accProcessStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_process\",\n\t\tmap[string]interface{}{\n\t\t\t\"open_file_descriptors\": float64(89.0),\n\t\t\t\"max_file_descriptors\": float64(1.048576e+06),\n\t\t\t\"cpu_percent\": float64(3.0),\n\t\t\t\"cpu_load_average_5m\": float64(0.61),\n\t\t\t\"cpu_load_average_15m\": float64(0.54),\n\t\t\t\"mem_total_virtual_in_bytes\": float64(4.809506816e+09),\n\t\t\t\"cpu_total_in_millis\": float64(1.5526e+11),\n\t\t\t\"cpu_load_average_1m\": float64(0.49),\n\t\t\t\"peak_open_file_descriptors\": float64(100.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash6GatherProcessStats(test *testing.T) {\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash6ProcessJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", 
logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherProcessStats(logstashTest.URL+processStats, &logstash6accProcessStats)\n\trequire.NoError(test, err, \"Can't gather Process stats\")\n\n\tlogstash6accProcessStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_process\",\n\t\tmap[string]interface{}{\n\t\t\t\"open_file_descriptors\": float64(133.0),\n\t\t\t\"max_file_descriptors\": float64(262144.0),\n\t\t\t\"cpu_percent\": float64(0.0),\n\t\t\t\"cpu_load_average_5m\": float64(42.4),\n\t\t\t\"cpu_load_average_15m\": float64(38.95),\n\t\t\t\"mem_total_virtual_in_bytes\": float64(17923452928.0),\n\t\t\t\"cpu_total_in_millis\": float64(5841460),\n\t\t\t\"cpu_load_average_1m\": float64(48.2),\n\t\t\t\"peak_open_file_descriptors\": float64(145.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash5GatherPipelineStats(test *testing.T) {\n\t\/\/logstash5accPipelineStats.SetDebug(true)\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash5PipelineJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherPipelineStats(logstashTest.URL+pipelineStats, &logstash5accPipelineStats)\n\trequire.NoError(test, err, \"Can't gather Pipeline stats\")\n\n\tlogstash5accPipelineStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_events\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(1151.0),\n\t\t\t\"in\": float64(1269.0),\n\t\t\t\"filtered\": float64(1269.0),\n\t\t\t\"out\": float64(1269.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t},\n\t)\n\n\tfields := make(map[string]interface{})\n\tfields[\"queue_push_duration_in_millis\"] = float64(32.0)\n\tfields[\"out\"] = float64(2.0)\n\n\tlogstash5accPipelineStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tfields,\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t\t\"plugin_name\": string(\"beats\"),\n\t\t\t\"plugin_id\": string(\"a35197a509596954e905e38521bae12b1498b17d-1\"),\n\t\t\t\"plugin_type\": 
string(\"input\"),\n\t\t},\n\t)\n\n\tlogstash5accPipelineStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(360.0),\n\t\t\t\"in\": float64(1269.0),\n\t\t\t\"out\": float64(1269.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t\t\"plugin_name\": string(\"stdout\"),\n\t\t\t\"plugin_id\": string(\"582d5c2becb582a053e1e9a6bcc11d49b69a6dfd-2\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\n\tlogstash5accPipelineStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(228.0),\n\t\t\t\"in\": float64(1269.0),\n\t\t\t\"out\": float64(1269.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t\t\"plugin_name\": string(\"s3\"),\n\t\t\t\"plugin_id\": string(\"582d5c2becb582a053e1e9a6bcc11d49b69a6dfd-3\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash6GatherPipelinesStats(test *testing.T) {\n\t\/\/logstash6accPipelinesStats.SetDebug(true)\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash6PipelinesJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherPipelinesStats(logstashTest.URL+pipelineStats, &logstash6accPipelinesStats)\n\trequire.NoError(test, err, \"Can't gather Pipeline stats\")\n\n\tfields := make(map[string]interface{})\n\tfields[\"duration_in_millis\"] = float64(8540751.0)\n\tfields[\"queue_push_duration_in_millis\"] = float64(366.0)\n\tfields[\"in\"] = float64(180659.0)\n\tfields[\"filtered\"] = float64(180659.0)\n\tfields[\"out\"] = float64(180659.0)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_events\",\n\t\tfields,\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t},\n\t)\n\n\tfields = make(map[string]interface{})\n\tfields[\"queue_push_duration_in_millis\"] = float64(366.0)\n\tfields[\"out\"] = float64(180659.0)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tfields,\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": 
string(\"main\"),\n\t\t\t\"plugin_name\": string(\"kafka\"),\n\t\t\t\"plugin_id\": string(\"input-kafka\"),\n\t\t\t\"plugin_type\": string(\"input\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(2117.0),\n\t\t\t\"in\": float64(27641.0),\n\t\t\t\"out\": float64(27641.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"155b0ad18abbf3df1e0cb7bddef0d77c5ba699efe5a0f8a28502d140549baf54\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(2117.0),\n\t\t\t\"in\": float64(27641.0),\n\t\t\t\"out\": float64(27641.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"155b0ad18abbf3df1e0cb7bddef0d77c5ba699efe5a0f8a28502d140549baf54\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(13149.0),\n\t\t\t\"in\": float64(180659.0),\n\t\t\t\"out\": float64(177549.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"date\"),\n\t\t\t\"plugin_id\": string(\"d079424bb6b7b8c7c61d9c5e0ddae445e92fa9ffa2e8690b0a669f7c690542f0\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(2814.0),\n\t\t\t\"in\": float64(76602.0),\n\t\t\t\"out\": float64(76602.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"25afa60ab6dc30512fe80efa3493e4928b5b1b109765b7dc46a3e4bbf293d2d4\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(9.0),\n\t\t\t\"in\": float64(934.0),\n\t\t\t\"out\": float64(934.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": 
string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"2d9fa8f74eeb137bfa703b8050bad7d76636fface729e4585b789b5fc9bed668\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(173.0),\n\t\t\t\"in\": float64(3110.0),\n\t\t\t\"out\": float64(0.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"drop\"),\n\t\t\t\"plugin_id\": string(\"4ed14c9ef0198afe16c31200041e98d321cb5c2e6027e30b077636b8c4842110\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(5605.0),\n\t\t\t\"in\": float64(75482.0),\n\t\t\t\"out\": float64(75482.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"358ce1eb387de7cd5711c2fb4de64cd3b12e5ca9a4c45f529516bcb053a31df4\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(313992.0),\n\t\t\t\"in\": float64(180659.0),\n\t\t\t\"out\": float64(180659.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"csv\"),\n\t\t\t\"plugin_id\": string(\"82a9bbb02fff37a63c257c1f146b0a36273c7cbbebe83c0a51f086e5280bf7bb\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(0.0),\n\t\t\t\"in\": float64(0.0),\n\t\t\t\"out\": float64(0.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"mutate\"),\n\t\t\t\"plugin_id\": string(\"8fb13a8cdd4257b52724d326aa1549603ffdd4e4fde6d20720c96b16238c18c3\"),\n\t\t\t\"plugin_type\": string(\"filter\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(651386.0),\n\t\t\t\"in\": float64(177549.0),\n\t\t\t\"out\": float64(177549.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": 
string(\"main\"),\n\t\t\t\"plugin_name\": string(\"elasticsearch\"),\n\t\t\t\"plugin_id\": string(\"output-elk\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(186751.0),\n\t\t\t\"in\": float64(177549.0),\n\t\t\t\"out\": float64(177549.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"kafka\"),\n\t\t\t\"plugin_id\": string(\"output-kafka1\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(7335196.0),\n\t\t\t\"in\": float64(177549.0),\n\t\t\t\"out\": float64(177549.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"plugin_name\": string(\"kafka\"),\n\t\t\t\"plugin_id\": string(\"output-kafka2\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\n\tlogstash6accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_queue\",\n\t\tmap[string]interface{}{\n\t\t\t\"events\": float64(103),\n\t\t\t\"free_space_in_bytes\": float64(36307369984),\n\t\t\t\"max_queue_size_in_bytes\": float64(1073741824),\n\t\t\t\"max_unread_events\": float64(0),\n\t\t\t\"page_capacity_in_bytes\": float64(67108864),\n\t\t\t\"queue_size_in_bytes\": float64(1872391),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t\t\"pipeline\": string(\"main\"),\n\t\t\t\"queue_type\": string(\"persisted\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash5GatherJVMStats(test *testing.T) {\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash5JvmJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherJVMStats(logstashTest.URL+jvmStats, &logstash5accJVMStats)\n\trequire.NoError(test, err, \"Can't gather JVM stats\")\n\n\tlogstash5accJVMStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_jvm\",\n\t\tmap[string]interface{}{\n\t\t\t\"mem_pools_young_max_in_bytes\": float64(5.5836672e+08),\n\t\t\t\"mem_pools_young_committed_in_bytes\": float64(1.43261696e+08),\n\t\t\t\"mem_heap_committed_in_bytes\": 
float64(5.1904512e+08),\n\t\t\t\"threads_count\": float64(29.0),\n\t\t\t\"mem_pools_old_peak_used_in_bytes\": float64(1.27900864e+08),\n\t\t\t\"mem_pools_old_peak_max_in_bytes\": float64(7.2482816e+08),\n\t\t\t\"mem_heap_used_percent\": float64(16.0),\n\t\t\t\"gc_collectors_young_collection_time_in_millis\": float64(3235.0),\n\t\t\t\"mem_pools_survivor_committed_in_bytes\": float64(1.7825792e+07),\n\t\t\t\"mem_pools_young_used_in_bytes\": float64(7.6049384e+07),\n\t\t\t\"mem_non_heap_committed_in_bytes\": float64(2.91487744e+08),\n\t\t\t\"mem_pools_survivor_peak_max_in_bytes\": float64(3.4865152e+07),\n\t\t\t\"mem_pools_young_peak_max_in_bytes\": float64(2.7918336e+08),\n\t\t\t\"uptime_in_millis\": float64(4.803461e+06),\n\t\t\t\"mem_pools_survivor_peak_used_in_bytes\": float64(8.912896e+06),\n\t\t\t\"mem_pools_survivor_max_in_bytes\": float64(6.9730304e+07),\n\t\t\t\"gc_collectors_old_collection_count\": float64(2.0),\n\t\t\t\"mem_pools_survivor_used_in_bytes\": float64(9.419672e+06),\n\t\t\t\"mem_pools_old_used_in_bytes\": float64(2.55801728e+08),\n\t\t\t\"mem_pools_old_max_in_bytes\": float64(1.44965632e+09),\n\t\t\t\"mem_pools_young_peak_used_in_bytes\": float64(7.1630848e+07),\n\t\t\t\"mem_heap_used_in_bytes\": float64(3.41270784e+08),\n\t\t\t\"mem_heap_max_in_bytes\": float64(2.077753344e+09),\n\t\t\t\"gc_collectors_young_collection_count\": float64(616.0),\n\t\t\t\"threads_peak_count\": float64(31.0),\n\t\t\t\"mem_pools_old_committed_in_bytes\": float64(3.57957632e+08),\n\t\t\t\"gc_collectors_old_collection_time_in_millis\": float64(114.0),\n\t\t\t\"mem_non_heap_used_in_bytes\": float64(2.68905936e+08),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"a360d8cf-6289-429d-8419-6145e324b574\"),\n\t\t\t\"node_name\": string(\"node-5-test\"),\n\t\t\t\"source\": string(\"node-5\"),\n\t\t\t\"node_version\": string(\"5.3.0\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash6GatherJVMStats(test *testing.T) {\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash6JvmJSON))\n\t\trequire.NoError(test, err)\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\trequire.NoErrorf(test, err, \"Can't connect to: %s\", logstashTest.URL)\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\t\trequire.NoError(test, err, \"Can't createHTTPClient\")\n\t\tlogstashTest.client = client\n\t}\n\n\terr = logstashTest.gatherJVMStats(logstashTest.URL+jvmStats, &logstash6accJVMStats)\n\trequire.NoError(test, err, \"Can't gather JVM stats\")\n\n\tlogstash6accJVMStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_jvm\",\n\t\tmap[string]interface{}{\n\t\t\t\"mem_pools_young_max_in_bytes\": float64(1605304320.0),\n\t\t\t\"mem_pools_young_committed_in_bytes\": float64(71630848.0),\n\t\t\t\"mem_heap_committed_in_bytes\": float64(824963072.0),\n\t\t\t\"threads_count\": float64(60.0),\n\t\t\t\"mem_pools_old_peak_used_in_bytes\": float64(696572600.0),\n\t\t\t\"mem_pools_old_peak_max_in_bytes\": float64(6583418880.0),\n\t\t\t\"mem_heap_used_percent\": float64(2.0),\n\t\t\t\"gc_collectors_young_collection_time_in_millis\": float64(107321.0),\n\t\t\t\"mem_pools_survivor_committed_in_bytes\": 
float64(8912896.0),\n\t\t\t\"mem_pools_young_used_in_bytes\": float64(11775120.0),\n\t\t\t\"mem_non_heap_committed_in_bytes\": float64(222986240.0),\n\t\t\t\"mem_pools_survivor_peak_max_in_bytes\": float64(200605696),\n\t\t\t\"mem_pools_young_peak_max_in_bytes\": float64(1605304320.0),\n\t\t\t\"uptime_in_millis\": float64(281850926.0),\n\t\t\t\"mem_pools_survivor_peak_used_in_bytes\": float64(8912896.0),\n\t\t\t\"mem_pools_survivor_max_in_bytes\": float64(200605696.0),\n\t\t\t\"gc_collectors_old_collection_count\": float64(37.0),\n\t\t\t\"mem_pools_survivor_used_in_bytes\": float64(835008.0),\n\t\t\t\"mem_pools_old_used_in_bytes\": float64(189750576.0),\n\t\t\t\"mem_pools_old_max_in_bytes\": float64(6583418880.0),\n\t\t\t\"mem_pools_young_peak_used_in_bytes\": float64(71630848.0),\n\t\t\t\"mem_heap_used_in_bytes\": float64(202360704.0),\n\t\t\t\"mem_heap_max_in_bytes\": float64(8389328896.0),\n\t\t\t\"gc_collectors_young_collection_count\": float64(2094.0),\n\t\t\t\"threads_peak_count\": float64(62.0),\n\t\t\t\"mem_pools_old_committed_in_bytes\": float64(744419328.0),\n\t\t\t\"gc_collectors_old_collection_time_in_millis\": float64(7492.0),\n\t\t\t\"mem_non_heap_used_in_bytes\": float64(197878896.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"3044f675-21ce-4335-898a-8408aa678245\"),\n\t\t\t\"node_name\": string(\"node-6-test\"),\n\t\t\t\"source\": string(\"node-6\"),\n\t\t\t\"node_version\": string(\"6.4.2\"),\n\t\t},\n\t)\n}\n\nfunc Test_Logstash7GatherPipelinesQueueStats(test *testing.T) {\n\tfakeServer := httptest.NewUnstartedServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\t\twriter.Header().Set(\"Content-Type\", \"application\/json\")\n\t\t_, err := fmt.Fprintf(writer, \"%s\", string(logstash7PipelinesJSON))\n\t\tif err != nil {\n\t\t\ttest.Logf(\"Can't print test json\")\n\t\t}\n\t}))\n\trequestURL, err := url.Parse(logstashTest.URL)\n\tif err != nil {\n\t\ttest.Logf(\"Can't connect to: %s\", logstashTest.URL)\n\t}\n\tfakeServer.Listener, _ = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", requestURL.Hostname(), requestURL.Port()))\n\tfakeServer.Start()\n\tdefer fakeServer.Close()\n\n\tif logstashTest.client == nil {\n\t\tclient, err := logstashTest.createHTTPClient()\n\n\t\tif err != nil {\n\t\t\ttest.Logf(\"Can't createHTTPClient\")\n\t\t}\n\t\tlogstashTest.client = client\n\t}\n\n\tif err := logstashTest.gatherPipelinesStats(logstashTest.URL+pipelineStats, &logstash7accPipelinesStats); err != nil {\n\t\ttest.Logf(\"Can't gather Pipeline stats\")\n\t}\n\n\tfields := make(map[string]interface{})\n\tfields[\"duration_in_millis\"] = float64(3032875.0)\n\tfields[\"queue_push_duration_in_millis\"] = float64(13300.0)\n\tfields[\"in\"] = float64(2665549.0)\n\tfields[\"filtered\"] = float64(2665549.0)\n\tfields[\"out\"] = float64(2665549.0)\n\n\tlogstash7accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_events\",\n\t\tfields,\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"28580380-ad2c-4032-934b-76359125edca\"),\n\t\t\t\"node_name\": string(\"HOST01.local\"),\n\t\t\t\"source\": string(\"HOST01.local\"),\n\t\t\t\"node_version\": string(\"7.4.2\"),\n\t\t\t\"pipeline\": string(\"infra\"),\n\t\t},\n\t)\n\n\tlogstash7accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"duration_in_millis\": float64(2802177.0),\n\t\t\t\"in\": float64(2665549.0),\n\t\t\t\"out\": float64(2665549.0),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": 
string(\"28580380-ad2c-4032-934b-76359125edca\"),\n\t\t\t\"node_name\": string(\"HOST01.local\"),\n\t\t\t\"source\": string(\"HOST01.local\"),\n\t\t\t\"node_version\": string(\"7.4.2\"),\n\t\t\t\"pipeline\": string(\"infra\"),\n\t\t\t\"plugin_name\": string(\"elasticsearch\"),\n\t\t\t\"plugin_id\": string(\"38967f09bbd2647a95aa00702b6b557bdbbab31da6a04f991d38abe5629779e3\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\tlogstash7accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"bulk_requests_successes\": float64(2870),\n\t\t\t\"bulk_requests_responses_200\": float64(2870),\n\t\t\t\"bulk_requests_failures\": float64(262),\n\t\t\t\"bulk_requests_with_errors\": float64(9089),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"28580380-ad2c-4032-934b-76359125edca\"),\n\t\t\t\"node_name\": string(\"HOST01.local\"),\n\t\t\t\"source\": string(\"HOST01.local\"),\n\t\t\t\"node_version\": string(\"7.4.2\"),\n\t\t\t\"pipeline\": string(\"infra\"),\n\t\t\t\"plugin_name\": string(\"elasticsearch\"),\n\t\t\t\"plugin_id\": string(\"38967f09bbd2647a95aa00702b6b557bdbbab31da6a04f991d38abe5629779e3\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\tlogstash7accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_plugins\",\n\t\tmap[string]interface{}{\n\t\t\t\"documents_successes\": float64(2665549),\n\t\t\t\"documents_retryable_failures\": float64(13733),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"28580380-ad2c-4032-934b-76359125edca\"),\n\t\t\t\"node_name\": string(\"HOST01.local\"),\n\t\t\t\"source\": string(\"HOST01.local\"),\n\t\t\t\"node_version\": string(\"7.4.2\"),\n\t\t\t\"pipeline\": string(\"infra\"),\n\t\t\t\"plugin_name\": string(\"elasticsearch\"),\n\t\t\t\"plugin_id\": string(\"38967f09bbd2647a95aa00702b6b557bdbbab31da6a04f991d38abe5629779e3\"),\n\t\t\t\"plugin_type\": string(\"output\"),\n\t\t},\n\t)\n\n\tlogstash7accPipelinesStats.AssertContainsTaggedFields(\n\t\ttest,\n\t\t\"logstash_queue\",\n\t\tmap[string]interface{}{\n\t\t\t\"events\": float64(0),\n\t\t\t\"max_queue_size_in_bytes\": float64(4294967296),\n\t\t\t\"queue_size_in_bytes\": float64(32028566),\n\t\t},\n\t\tmap[string]string{\n\t\t\t\"node_id\": string(\"28580380-ad2c-4032-934b-76359125edca\"),\n\t\t\t\"node_name\": string(\"HOST01.local\"),\n\t\t\t\"source\": string(\"HOST01.local\"),\n\t\t\t\"node_version\": string(\"7.4.2\"),\n\t\t\t\"pipeline\": string(\"infra\"),\n\t\t\t\"queue_type\": string(\"persisted\"),\n\t\t},\n\t)\n}\n","avg_line_length":36.4891994917,"max_line_length":119,"alphanum_fraction":0.6623254518} +{"size":2929,"ext":"go","lang":"Go","max_stars_count":1996.0,"content":"\/\/ Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\"). You may\n\/\/ not use this file except in compliance with the License. A copy of the\n\/\/ License is located at\n\/\/\n\/\/\thttp:\/\/aws.amazon.com\/apache2.0\/\n\/\/\n\/\/ or in the \"license\" file accompanying this file. This file is distributed\n\/\/ on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n\/\/ express or implied. 
See the License for the specific language governing\n\/\/ permissions and limitations under the License.\n\n\/\/ Package ecr helps generate clients to talk to the ECR API\npackage ecr\n\nimport (\n\t\"fmt\"\n\t\"net\/http\"\n\t\"time\"\n\n\tapicontainer \"github.com\/aws\/amazon-ecs-agent\/agent\/api\/container\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/credentials\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/credentials\/instancecreds\"\n\tecrapi \"github.com\/aws\/amazon-ecs-agent\/agent\/ecr\/model\/ecr\"\n\t\"github.com\/aws\/amazon-ecs-agent\/agent\/httpclient\"\n\t\"github.com\/aws\/aws-sdk-go\/aws\"\n\tawscreds \"github.com\/aws\/aws-sdk-go\/aws\/credentials\"\n\t\"github.com\/aws\/aws-sdk-go\/aws\/session\"\n)\n\n\/\/ ECRFactory defines the interface to produce an ECR SDK client\ntype ECRFactory interface {\n\tGetClient(*apicontainer.ECRAuthData) (ECRClient, error)\n}\n\ntype ecrFactory struct {\n\thttpClient *http.Client\n}\n\nconst (\n\troundtripTimeout = 5 * time.Second\n)\n\n\/\/ NewECRFactory returns an ECRFactory capable of producing ECRSDK clients\nfunc NewECRFactory(acceptInsecureCert bool) ECRFactory {\n\treturn &ecrFactory{\n\t\thttpClient: httpclient.New(roundtripTimeout, acceptInsecureCert),\n\t}\n}\n\n\/\/ GetClient creates the ECR SDK client based on the authdata\nfunc (factory *ecrFactory) GetClient(authData *apicontainer.ECRAuthData) (ECRClient, error) {\n\tclientConfig, err := getClientConfig(factory.httpClient, authData)\n\tif err != nil {\n\t\treturn &ecrClient{}, err\n\t}\n\n\treturn factory.newClient(clientConfig), nil\n}\n\n\/\/ getClientConfig returns the config for the ecr client based on authData\nfunc getClientConfig(httpClient *http.Client, authData *apicontainer.ECRAuthData) (*aws.Config, error) {\n\tcfg := aws.NewConfig().WithRegion(authData.Region).WithHTTPClient(httpClient)\n\tif authData.EndpointOverride != \"\" {\n\t\tcfg.Endpoint = aws.String(authData.EndpointOverride)\n\t}\n\n\tif authData.UseExecutionRole {\n\t\tif authData.GetPullCredentials() == (credentials.IAMRoleCredentials{}) {\n\t\t\treturn nil, fmt.Errorf(\"container uses execution credentials, but the credentials are empty\")\n\t\t}\n\t\tcreds := awscreds.NewStaticCredentials(authData.GetPullCredentials().AccessKeyID,\n\t\t\tauthData.GetPullCredentials().SecretAccessKey,\n\t\t\tauthData.GetPullCredentials().SessionToken)\n\t\tcfg = cfg.WithCredentials(creds)\n\t} else {\n\t\tcfg = cfg.WithCredentials(instancecreds.GetCredentials())\n\t}\n\n\treturn cfg, nil\n}\n\nfunc (factory *ecrFactory) newClient(cfg *aws.Config) ECRClient {\n\tsdkClient := ecrapi.New(session.New(cfg))\n\treturn NewECRClient(sdkClient)\n}\n","avg_line_length":33.2840909091,"max_line_length":104,"alphanum_fraction":0.7644247183} +{"size":2795,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright Turing Corp. 
2018 All Rights Reserved.\n\/\/ Use of this source code is governed by a BSD-style\n\/\/ license that can be found in the LICENSE file.\n\npackage executor\n\nimport (\n\t\"github.com\/turingchain2020\/turingchain\/account\"\n\t\"github.com\/turingchain2020\/turingchain\/common\/db\"\n\t\"github.com\/turingchain2020\/turingchain\/types\"\n\tpt \"github.com\/turingchain2020\/plugin\/plugin\/dapp\/trade\/types\"\n)\n\n\/*\n \u5728\u4ee5\u524d\u7248\u672c\u4e2d\u53ea\u6709token \u5408\u7ea6\u53d1\u884c\u7684\u5e01\u5728trade \u91cc\u9762\u4ea4\u6613\uff0c \u8ba2\u5355\u4e2d symbol \u4e3a token \u7684symbol\uff0c\n \u73b0\u5728 symbol \u6269\u5c55\u6210 exec.sybol@title, @title \u5148\u5ffd\u7565\uff0c (\u56e0\u4e3a\u4e0d\u5fc5\u8981, \u53ea\u652f\u6301\u4e3b\u94fe\u5230\u5e73\u884c\u94fe)\u3002\n \u5728\u8ba2\u5355\u4e2d\u589e\u52a0 exec\uff0c \u8868\u793a\u5e01\u4ece\u90a3\u4e2a\u5408\u7ea6\u4e2d\u6765\u7684\u3002\n\n \u5728\u4e3b\u94fe\n \u539f\u6765\u7684\u8ba2\u5355 exec = \"\" symbol = \"TEST\"\n \u65b0\u7684\u8ba2\u5355 exec = \"token\" symbol = \"token.TEST\"\n\n \u5728\u5e73\u884c\u94fe, \u4e3b\u94fe\u8d44\u4ea7\u548c\u672c\u94fe\u8d44\u4ea7\u7684\u8868\u793a\u533a\u522b\u5982\u4e0b\n exec = \"paracross\" symbol = \"token.TEST\"\n exec = \"token\" symbol = \"token.TEST\"\n\n*\/\n\n\/\/GetExecSymbol : return exec, symbol\nfunc GetExecSymbol(order *pt.SellOrder) (string, string) {\n\tif order.AssetExec == \"\" {\n\t\treturn defaultAssetExec, defaultAssetExec + \".\" + order.TokenSymbol\n\t}\n\treturn order.AssetExec, order.TokenSymbol\n}\n\nfunc checkAsset(cfg *types.TuringchainConfig, height int64, exec, symbol string) bool {\n\tif cfg.IsDappFork(height, pt.TradeX, pt.ForkTradeAssetX) {\n\t\tif exec == \"\" || symbol == \"\" {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tif exec != \"\" {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}\n\nfunc checkPrice(cfg *types.TuringchainConfig, height int64, exec, symbol string) bool {\n\tif cfg.IsDappFork(height, pt.TradeX, pt.ForkTradePriceX) {\n\t\tif exec == \"\" && symbol != \"\" || exec != \"\" && symbol == \"\" {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tif exec != \"\" || symbol != \"\" {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}\n\nfunc notSameAsset(cfg *types.TuringchainConfig, height int64, assetExec, assetSymbol, priceExec, priceSymbol string) bool {\n\tif cfg.IsDappFork(height, pt.TradeX, pt.ForkTradePriceX) {\n\t\tif assetExec == priceExec && assetSymbol == priceSymbol {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}\n\nfunc createAccountDB(cfg *types.TuringchainConfig, height int64, db db.KV, exec, symbol string) (*account.DB, error) {\n\tif cfg.IsDappFork(height, pt.TradeX, pt.ForkTradeFixAssetDBX) {\n\t\tif exec == \"\" {\n\t\t\texec = defaultAssetExec\n\t\t}\n\t\treturn account.NewAccountDB(cfg, exec, symbol, db)\n\t} else if cfg.IsDappFork(height, pt.TradeX, pt.ForkTradeAssetX) {\n\t\treturn account.NewAccountDB(cfg, exec, symbol, db)\n\t}\n\n\treturn account.NewAccountDB(cfg, defaultAssetExec, symbol, db)\n}\n\nfunc createPriceDB(cfg *types.TuringchainConfig, height int64, db db.KV, exec, symbol string) (*account.DB, error) {\n\tif cfg.IsDappFork(height, pt.TradeX, pt.ForkTradePriceX) {\n\t\t\/\/ \u4e3a\u7a7a\u9ed8\u8ba4\u4f7f\u7528 coins\n\t\tif exec == \"\" {\n\t\t\tacc := account.NewCoinsAccount(cfg)\n\t\t\tacc.SetDB(db)\n\t\t\treturn acc, nil\n\t\t}\n\t\treturn account.NewAccountDB(cfg, exec, symbol, db)\n\t}\n\tacc := account.NewCoinsAccount(cfg)\n\tacc.SetDB(db)\n\treturn acc, 
nil\n}\n","avg_line_length":28.2323232323,"max_line_length":123,"alphanum_fraction":0.6958855098} +{"size":860,"ext":"go","lang":"Go","max_stars_count":null,"content":"package qtypes_helper\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n)\n\nfunc CompareMap(exp, got map[string]interface{}) bool {\n\tfor eK, eV := range exp {\n\t\tgV, ok := got[eK]\n\t\tif ! ok {\n\t\t\tfmt.Printf(\"Expected key '%s' not found\\n\", eK)\n\t\t\treturn false\n\t\t}\n\t\tswitch eV.(type) {\n\t\tcase string,int,int64,float64,bool:\n\t\t\tif eV != gV {\n\t\t\t\tfmt.Printf(\"Key '%s' differs: expected:%v != %v\\n\", eK, eV, gV)\n\t\t\t\treturn false\n\t\t\t}\n\t\tcase []string:\n\t\t\tif ! reflect.DeepEqual(eV, gV) {\n\t\t\t\tfmt.Printf(\"Key '%s' differs: expected:%v != %v\\n\", eK, eV, gV)\n\t\t\t\treturn false\n\t\t\t}\n\t\tcase map[string]string:\n\t\t\tif ! reflect.DeepEqual(eV, gV) {\n\t\t\t\tfmt.Printf(\"Key '%s' differs: expected:%v != %v\\n\", eK, eV, gV)\n\t\t\t\treturn false\n\t\t\t}\n\t\tdefault:\n\t\t\tif ! reflect.DeepEqual(eV, gV) {\n\t\t\t\tfmt.Printf(\"Key '%s' differs: expected:%v != %v\\n\", eK, eV, gV)\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\n\t}\n\treturn true\n}\n","avg_line_length":20.9756097561,"max_line_length":67,"alphanum_fraction":0.5813953488} +{"size":3610,"ext":"go","lang":"Go","max_stars_count":35.0,"content":"\/\/ Code generated by protoc-gen-go.\n\/\/ source: google.golang.org\/genproto\/googleapis\/api\/serviceconfig\/backend.proto\n\/\/ DO NOT EDIT!\n\npackage google_api \/\/ import \"google.golang.org\/genproto\/googleapis\/api\/serviceconfig\"\n\nimport proto \"github.com\/golang\/protobuf\/proto\"\nimport fmt \"fmt\"\nimport math \"math\"\n\n\/\/ Reference imports to suppress errors if they are not otherwise used.\nvar _ = proto.Marshal\nvar _ = fmt.Errorf\nvar _ = math.Inf\n\n\/\/ `Backend` defines the backend configuration for a service.\ntype Backend struct {\n\t\/\/ A list of backend rules providing configuration for individual API\n\t\/\/ elements.\n\tRules []*BackendRule `protobuf:\"bytes,1,rep,name=rules\" json:\"rules,omitempty\"`\n}\n\nfunc (m *Backend) Reset() { *m = Backend{} }\nfunc (m *Backend) String() string { return proto.CompactTextString(m) }\nfunc (*Backend) ProtoMessage() {}\nfunc (*Backend) Descriptor() ([]byte, []int) { return fileDescriptor2, []int{0} }\n\nfunc (m *Backend) GetRules() []*BackendRule {\n\tif m != nil {\n\t\treturn m.Rules\n\t}\n\treturn nil\n}\n\n\/\/ A backend rule provides configuration for an individual API element.\ntype BackendRule struct {\n\t\/\/ Selects the methods to which this rule applies.\n\t\/\/\n\t\/\/ Refer to [selector][google.api.DocumentationRule.selector] for syntax details.\n\tSelector string `protobuf:\"bytes,1,opt,name=selector\" json:\"selector,omitempty\"`\n\t\/\/ The address of the API backend.\n\t\/\/\n\tAddress string `protobuf:\"bytes,2,opt,name=address\" json:\"address,omitempty\"`\n\t\/\/ The number of seconds to wait for a response from a request. 
The\n\t\/\/ default depends on the deployment context.\n\tDeadline float64 `protobuf:\"fixed64,3,opt,name=deadline\" json:\"deadline,omitempty\"`\n}\n\nfunc (m *BackendRule) Reset() { *m = BackendRule{} }\nfunc (m *BackendRule) String() string { return proto.CompactTextString(m) }\nfunc (*BackendRule) ProtoMessage() {}\nfunc (*BackendRule) Descriptor() ([]byte, []int) { return fileDescriptor2, []int{1} }\n\nfunc init() {\n\tproto.RegisterType((*Backend)(nil), \"google.api.Backend\")\n\tproto.RegisterType((*BackendRule)(nil), \"google.api.BackendRule\")\n}\n\nfunc init() {\n\tproto.RegisterFile(\"google.golang.org\/genproto\/googleapis\/api\/serviceconfig\/backend.proto\", fileDescriptor2)\n}\n\nvar fileDescriptor2 = []byte{\n\t\/\/ 207 bytes of a gzipped FileDescriptorProto\n\t0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x4c, 0x8f, 0x3d, 0x4f, 0x04, 0x21,\n\t0x10, 0x86, 0x83, 0x17, 0x3d, 0x9d, 0x33, 0x16, 0x34, 0x12, 0x2b, 0x72, 0xd5, 0x36, 0x42, 0xa2,\n\t0x8d, 0xf5, 0x26, 0xf6, 0x17, 0xfe, 0x80, 0xe1, 0x60, 0x24, 0x44, 0x64, 0x08, 0xdc, 0xfa, 0xfb,\n\t0xcd, 0x7e, 0xb8, 0x6e, 0xf9, 0xf2, 0x3c, 0xcc, 0xcc, 0x0b, 0xef, 0x81, 0x28, 0x24, 0x54, 0x81,\n\t0x92, 0xcd, 0x41, 0x51, 0x0d, 0x3a, 0x60, 0x2e, 0x95, 0x2e, 0xa4, 0x67, 0x64, 0x4b, 0x6c, 0xda,\n\t0x96, 0xa8, 0x1b, 0xd6, 0x9f, 0xe8, 0xd0, 0x51, 0xfe, 0x8c, 0x41, 0x9f, 0xad, 0xfb, 0xc2, 0xec,\n\t0xd5, 0xa4, 0x72, 0x58, 0xc6, 0xd8, 0x12, 0x8f, 0x6f, 0xb0, 0xef, 0x67, 0xc8, 0x9f, 0xe1, 0xba,\n\t0x0e, 0x09, 0x9b, 0x60, 0x72, 0xd7, 0x1d, 0x5e, 0x1e, 0xd5, 0xbf, 0xa6, 0x16, 0xc7, 0x0c, 0x09,\n\t0xcd, 0x6c, 0x1d, 0x3f, 0xe0, 0xb0, 0x79, 0xe5, 0x4f, 0x70, 0xdb, 0x30, 0xa1, 0xbb, 0x50, 0x15,\n\t0x4c, 0xb2, 0xee, 0xce, 0xac, 0x99, 0x0b, 0xd8, 0x5b, 0xef, 0x2b, 0xb6, 0x26, 0xae, 0x26, 0xf4,\n\t0x17, 0xc7, 0x5f, 0x1e, 0xad, 0x4f, 0x31, 0xa3, 0xd8, 0x49, 0xd6, 0x31, 0xb3, 0xe6, 0x5e, 0xc2,\n\t0x83, 0xa3, 0xef, 0xcd, 0x15, 0xfd, 0xfd, 0xb2, 0xf0, 0x34, 0xd6, 0x38, 0xb1, 0xf3, 0xcd, 0xd4,\n\t0xe7, 0xf5, 0x37, 0x00, 0x00, 0xff, 0xff, 0x99, 0xf6, 0x26, 0x9c, 0x18, 0x01, 0x00, 0x00,\n}\n","avg_line_length":45.6962025316,"max_line_length":109,"alphanum_fraction":0.6894736842} +{"size":2072,"ext":"go","lang":"Go","max_stars_count":122.0,"content":"package groupingobjects_test\n\nimport (\n\t\"context\"\n\t\"errors\"\n\n\t\"github.com\/genevieve\/leftovers\/nsxt\/groupingobjects\"\n\t\"github.com\/genevieve\/leftovers\/nsxt\/groupingobjects\/fakes\"\n\t. \"github.com\/onsi\/ginkgo\"\n\t. 
\"github.com\/onsi\/gomega\"\n\t\"github.com\/vmware\/go-vmware-nsxt\/manager\"\n)\n\nvar _ = Describe(\"NS Groups\", func() {\n\tvar (\n\t\tclient *fakes.GroupingObjectsAPI\n\t\tlogger *fakes.Logger\n\t\tctx context.Context\n\t\tnsGroups groupingobjects.NSGroups\n\t)\n\n\tBeforeEach(func() {\n\t\tclient = &fakes.GroupingObjectsAPI{}\n\t\tlogger = &fakes.Logger{}\n\n\t\tctx = context.WithValue(context.Background(), \"fruit\", \"pineapple\")\n\n\t\tlogger.PromptWithDetailsCall.Returns.Proceed = true\n\n\t\tnsGroups = groupingobjects.NewNSGroups(client, ctx, logger)\n\t})\n\n\tDescribe(\"List\", func() {\n\t\tvar filter string\n\n\t\tBeforeEach(func() {\n\t\t\tclient.ListNSGroupsCall.Returns.NsGroupListResult = manager.NsGroupListResult{\n\t\t\t\tResults: []manager.NsGroup{\n\t\t\t\t\tmanager.NsGroup{\n\t\t\t\t\t\tId: \"pineapple-123\",\n\t\t\t\t\t\tDisplayName: \"pineapple\",\n\t\t\t\t\t},\n\t\t\t\t\tmanager.NsGroup{\n\t\t\t\t\t\tId: \"cherimoya-456\",\n\t\t\t\t\t\tDisplayName: \"cherimoya\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\n\t\t\tfilter = \"pineapple\"\n\t\t})\n\n\t\tIt(\"lists, filters, and prompts for ns groups to delete\", func() {\n\t\t\tlist, err := nsGroups.List(filter)\n\t\t\tExpect(err).NotTo(HaveOccurred())\n\n\t\t\tExpect(client.ListNSGroupsCall.CallCount).To(Equal(1))\n\t\t\tExpect(client.ListNSGroupsCall.Receives.Context).To(Equal(ctx))\n\n\t\t\tExpect(logger.PromptWithDetailsCall.CallCount).To(Equal(1))\n\t\t\tExpect(logger.PromptWithDetailsCall.Receives.ResourceType).To(Equal(\"NS Group\"))\n\t\t\tExpect(logger.PromptWithDetailsCall.Receives.ResourceName).To(Equal(\"pineapple\"))\n\n\t\t\tExpect(list).To(HaveLen(1))\n\t\t\tExpect(list[0].Name()).NotTo(Equal(\"cherimoya\"))\n\t\t})\n\n\t\tContext(\"when the client fails to list ns groups\", func() {\n\t\t\tBeforeEach(func() {\n\t\t\t\tclient.ListNSGroupsCall.Returns.Error = errors.New(\"PC LOAD LETTER\")\n\t\t\t})\n\n\t\t\tIt(\"returns the error\", func() {\n\t\t\t\t_, err := nsGroups.List(filter)\n\t\t\t\tExpect(err).To(MatchError(\"List NS Groups: PC LOAD LETTER\"))\n\t\t\t})\n\t\t})\n\t})\n})\n","avg_line_length":25.9,"max_line_length":84,"alphanum_fraction":0.6867760618} +{"size":6215,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/\/ Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 
2 except as noted otherwise in the LICENSE file\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npackage controller\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\tcorev1 \"k8s.io\/api\/core\/v1\"\n\tapierrors \"k8s.io\/apimachinery\/pkg\/api\/errors\"\n\tmetav1 \"k8s.io\/apimachinery\/pkg\/apis\/meta\/v1\"\n\t\"sigs.k8s.io\/controller-runtime\/pkg\/client\"\n\n\t\"github.com\/gardener\/gardener\/landscaper\/pkg\/gardenlet\/chart\"\n\tgardencorev1beta1 \"github.com\/gardener\/gardener\/pkg\/apis\/core\/v1beta1\"\n\t\"github.com\/gardener\/gardener\/pkg\/utils\/retry\"\n)\n\n\/\/ Delete removes all deployed Gardenlet resources from the Seed cluster.\nfunc (g Landscaper) Delete(ctx context.Context) error {\n\tseed := &gardencorev1beta1.Seed{ObjectMeta: metav1.ObjectMeta{\n\t\tName: g.gardenletConfiguration.SeedConfig.Name,\n\t}}\n\n\texists, err := g.seedExists(ctx, seed)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to check if Seed %q exists in the Garden cluster. Aborting. %w\", g.gardenletConfiguration.SeedConfig.Name, err)\n\t}\n\n\tif exists {\n\t\tshootList := &gardencorev1beta1.ShootList{}\n\t\tif err := g.gardenClient.Client().List(ctx, shootList); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to list shoots: %w\", err)\n\t\t}\n\n\t\tif isSeedUsedByAnyShoot(g.gardenletConfiguration.SeedConfig.Name, shootList.Items) {\n\t\t\treturn fmt.Errorf(\"cannot delete seed '%s' which is still used by at least one shoot\", g.gardenletConfiguration.SeedConfig.Name)\n\t\t}\n\n\t\terr := g.deleteSeedAndWait(ctx, seed)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tchartApplier := g.seedClient.ChartApplier()\n\tvalues, err := g.computeGardenletChartValues(nil)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to compute gardenlet chart values: %w\", err)\n\t}\n\n\tapplier := chart.NewGardenletChartApplier(chartApplier, values, g.chartPath)\n\tif err := applier.Destroy(ctx); err != nil {\n\t\treturn fmt.Errorf(\"failed to delete the Gardenlet resources from the Seed cluster %q: %w\", g.gardenletConfiguration.SeedConfig.Name, err)\n\t}\n\n\t\/\/ delete the Seed secret containing the Seed cluster kubeconfig from the Garden cluster\n\tif g.gardenletConfiguration.SeedConfig.Spec.SecretRef != nil {\n\t\tif err := g.gardenClient.Client().Delete(ctx, &corev1.Secret{ObjectMeta: metav1.ObjectMeta{Name: g.gardenletConfiguration.SeedConfig.Spec.SecretRef.Name, Namespace: g.gardenletConfiguration.SeedConfig.Spec.SecretRef.Namespace}}); client.IgnoreNotFound(err) != nil {\n\t\t\treturn fmt.Errorf(\"failed to delete the secret from the Garden cluster (%s\/%s) containing the Seed cluster's kubeconfig: %w\", g.gardenletConfiguration.SeedConfig.Spec.SecretRef.Namespace, g.gardenletConfiguration.SeedConfig.Spec.SecretRef.Name, err)\n\t\t}\n\t}\n\n\t\/\/ delete the seed-backup secret from the Garden cluster\n\tif g.imports.SeedBackupCredentials != nil && g.gardenletConfiguration.SeedConfig.Spec.Backup != nil {\n\t\tif err := 
g.gardenClient.Client().Delete(ctx, &corev1.Secret{ObjectMeta: metav1.ObjectMeta{Name: g.gardenletConfiguration.SeedConfig.Spec.Backup.SecretRef.Name, Namespace: g.gardenletConfiguration.SeedConfig.Spec.Backup.SecretRef.Namespace}}); client.IgnoreNotFound(err) != nil {\n\t\t\treturn fmt.Errorf(\"failed to delete the Seed backup secret (%s\/%s) from the Garden cluster: %w\", g.gardenletConfiguration.SeedConfig.Spec.Backup.SecretRef.Namespace, g.gardenletConfiguration.SeedConfig.Spec.Backup.SecretRef.Name, err)\n\t\t}\n\t}\n\n\tg.log.Infof(\"Successfully deleted Gardenlet resources for Seed %q\", g.gardenletConfiguration.SeedConfig.Name)\n\treturn nil\n}\n\n\/\/ deleteSeedAndWait waits until the Seed resource has been deleted from the Garden cluster\nfunc (g Landscaper) deleteSeedAndWait(ctx context.Context, seed *gardencorev1beta1.Seed) error {\n\tif err := retry.UntilTimeout(ctx, 10*time.Second, 2*time.Minute, func(ctx context.Context) (done bool, err error) {\n\t\terr = g.gardenClient.Client().Delete(ctx, seed)\n\t\tif err != nil {\n\t\t\tg.log.Infof(\"Error deleting seed %q: %v\", g.gardenletConfiguration.SeedConfig.Name, err)\n\t\t\treturn retry.MinorError(err)\n\t\t}\n\t\treturn retry.Ok()\n\t}); err != nil {\n\t\treturn fmt.Errorf(\"failed to delete seed %q: %w\", g.gardenletConfiguration.SeedConfig.Name, err)\n\t}\n\n\tif err := retry.UntilTimeout(ctx, 10*time.Second, 10*time.Minute, func(ctx context.Context) (done bool, err error) {\n\t\tseedExists, err := g.seedExists(ctx, seed)\n\t\tif err != nil {\n\t\t\tg.log.Infof(\"Error while waiting for seed to be deleted: %s\", err.Error())\n\t\t\treturn retry.MinorError(err)\n\t\t}\n\n\t\tif !seedExists {\n\t\t\tg.log.Infof(\"Seed %q has been deleted successfully\", seed.Name)\n\t\t\treturn retry.Ok()\n\t\t}\n\n\t\tg.log.Infof(\"Waiting for seed %s to be deleted\", seed.Name)\n\t\treturn retry.MinorError(fmt.Errorf(\"seed %q still exists\", seed.Name))\n\t}); err != nil {\n\t\treturn fmt.Errorf(\"failed waiting for the deletion of Seed %q: %w\", g.gardenletConfiguration.SeedConfig.Name, err)\n\t}\n\treturn nil\n}\n\n\/\/ seedExists checks if the given Seed resource exists in the garden cluster\nfunc (g *Landscaper) seedExists(ctx context.Context, seed *gardencorev1beta1.Seed) (bool, error) {\n\terr := g.gardenClient.Client().Get(ctx, client.ObjectKey{Name: seed.Name}, seed)\n\tif err != nil {\n\t\tif apierrors.IsNotFound(err) || g.isIntegrationTest {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, err\n\t}\n\treturn true, nil\n}\n\n\/\/ isSeedUsedByAnyShoot checks whether there is a shoot cluster referencing the provided seed name\nfunc isSeedUsedByAnyShoot(seedName string, shoots []gardencorev1beta1.Shoot) bool {\n\tfor _, shoot := range shoots {\n\t\tif shoot.Spec.SeedName != nil && *shoot.Spec.SeedName == seedName {\n\t\t\treturn true\n\t\t}\n\t\tif shoot.Status.SeedName != nil && *shoot.Status.SeedName == seedName {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n","avg_line_length":42.8620689655,"max_line_length":281,"alphanum_fraction":0.743845535} +{"size":6022,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/*\n Licensed to the Apache Software Foundation (ASF) under one\n or more contributor license agreements. See the NOTICE file\n distributed with this work for additional information\n regarding copyright ownership. The ASF licenses this file\n to you under the Apache License, Version 2.0 (the\n \"License\"); you may not use this file except in compliance\n with the License. 
You may obtain a copy of the License at\n\n http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*\/\n\npackage utils\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\tv1 \"k8s.io\/api\/core\/v1\"\n\t\"k8s.io\/apimachinery\/pkg\/api\/resource\"\n\tapis \"k8s.io\/apimachinery\/pkg\/apis\/meta\/v1\"\n\tpodv1 \"k8s.io\/kubernetes\/pkg\/api\/v1\/pod\"\n\n\t\"github.com\/apache\/incubator-yunikorn-k8shim\/pkg\/common\"\n\t\"github.com\/apache\/incubator-yunikorn-k8shim\/pkg\/common\/constants\"\n\t\"github.com\/apache\/incubator-yunikorn-scheduler-interface\/lib\/go\/si\"\n)\n\nfunc Convert2Pod(obj interface{}) (*v1.Pod, error) {\n\tpod, ok := obj.(*v1.Pod)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"cannot convert to *v1.Pod: %v\", obj)\n\t}\n\treturn pod, nil\n}\n\nfunc NeedRecovery(pod *v1.Pod) (bool, error) {\n\tif pod.Status.Phase == v1.PodPending {\n\t\treturn false, nil\n\t}\n\n\tif !GeneralPodFilter(pod) {\n\t\treturn false, nil\n\t}\n\n\tif pod.Spec.NodeName != \"\" {\n\t\treturn true, nil\n\t}\n\n\treturn false, fmt.Errorf(\"unknown pod state %v\", pod)\n}\n\nfunc IsPodRunning(pod *v1.Pod) bool {\n\treturn pod.Status.Phase == v1.PodRunning\n}\n\nfunc IsPodTerminated(pod *v1.Pod) bool {\n\treturn pod.Status.Phase == v1.PodFailed || pod.Status.Phase == v1.PodSucceeded\n}\n\n\/\/ assignedPod selects pods that are assigned (scheduled and running).\nfunc IsAssignedPod(pod *v1.Pod) bool {\n\treturn len(pod.Spec.NodeName) != 0\n}\n\nfunc GeneralPodFilter(pod *v1.Pod) bool {\n\treturn strings.Compare(pod.Spec.SchedulerName, constants.SchedulerName) == 0\n}\n\nfunc GetQueueNameFromPod(pod *v1.Pod) string {\n\tqueueName := constants.ApplicationDefaultQueue\n\tif an, ok := pod.Labels[constants.LabelQueueName]; ok {\n\t\tqueueName = an\n\t}\n\treturn queueName\n}\n\nfunc GetApplicationIDFromPod(pod *v1.Pod) (string, error) {\n\t\/\/ application ID can be defined in annotations\n\tfor name, value := range pod.Annotations {\n\t\tif name == constants.AnnotationApplicationID {\n\t\t\treturn value, nil\n\t\t}\n\t}\n\n\t\/\/ application ID can be defined in labels\n\tfor name, value := range pod.Labels {\n\t\t\/\/ application ID can be defined as a label\n\t\tif name == constants.LabelApplicationID {\n\t\t\treturn value, nil\n\t\t}\n\n\t\t\/\/ if a pod for spark already provided appID, reuse it\n\t\tif name == constants.SparkLabelAppID {\n\t\t\treturn value, nil\n\t\t}\n\t}\n\n\treturn \"\", fmt.Errorf(\"unable to retrieve application ID from pod spec, %s\",\n\t\tpod.Spec.String())\n}\n\n\/\/ compare the existing pod condition with the given one, return true if the pod condition remains not changed.\n\/\/ return false if pod has no condition set yet, or condition has changed.\nfunc PodUnderCondition(pod *v1.Pod, condition *v1.PodCondition) bool {\n\t_, current := podv1.GetPodCondition(&pod.Status, condition.Type)\n\treturn current != nil && current.Status == condition.Status && current.Reason == condition.Reason\n}\n\nfunc GetNamespaceQuotaFromAnnotation(namespaceObj *v1.Namespace) *si.Resource {\n\t\/\/ retrieve resource quota info from annotations\n\tcpuQuota := namespaceObj.Annotations[\"yunikorn.apache.org\/namespace.max.cpu\"]\n\tmemQuota := 
namespaceObj.Annotations[\"yunikorn.apache.org\/namespace.max.memory\"]\n\n\t\/\/ no quota found\n\tif cpuQuota == \"\" && memQuota == \"\" {\n\t\treturn nil\n\t}\n\n\treturn common.ParseResource(cpuQuota, memQuota)\n}\n\ntype K8sResource struct {\n\tResourceName v1.ResourceName\n\tValue int64\n}\n\nfunc NewK8sResourceList(resources ...K8sResource) map[v1.ResourceName]resource.Quantity {\n\tresourceList := make(map[v1.ResourceName]resource.Quantity)\n\tfor _, r := range resources {\n\t\tresourceList[r.ResourceName] = *resource.NewQuantity(r.Value, resource.DecimalSI)\n\t}\n\treturn resourceList\n}\n\nfunc WaitForCondition(eval func() bool, interval time.Duration, timeout time.Duration) error {\n\tdeadline := time.Now().Add(timeout)\n\tfor {\n\t\tif eval() {\n\t\t\treturn nil\n\t\t}\n\n\t\tif time.Now().After(deadline) {\n\t\t\treturn fmt.Errorf(\"timeout waiting for condition\")\n\t\t}\n\n\t\ttime.Sleep(interval)\n\t}\n}\n\nfunc PodForTest(podName, memory, cpu string) *v1.Pod {\n\tcontainers := make([]v1.Container, 0)\n\tc1Resources := make(map[v1.ResourceName]resource.Quantity)\n\tc1Resources[v1.ResourceMemory] = resource.MustParse(memory)\n\tc1Resources[v1.ResourceCPU] = resource.MustParse(cpu)\n\tcontainers = append(containers, v1.Container{\n\t\tName: \"container-01\",\n\t\tResources: v1.ResourceRequirements{\n\t\t\tRequests: c1Resources,\n\t\t},\n\t})\n\n\treturn &v1.Pod{\n\t\tTypeMeta: apis.TypeMeta{\n\t\t\tKind: \"Pod\",\n\t\t\tAPIVersion: \"v1\",\n\t\t},\n\t\tObjectMeta: apis.ObjectMeta{\n\t\t\tName: podName,\n\t\t},\n\t\tSpec: v1.PodSpec{\n\t\t\tContainers: containers,\n\t\t},\n\t}\n}\n\nfunc NodeForTest(nodeID, memory, cpu string) *v1.Node {\n\tresourceList := make(map[v1.ResourceName]resource.Quantity)\n\tresourceList[v1.ResourceName(\"memory\")] = resource.MustParse(memory)\n\tresourceList[v1.ResourceName(\"cpu\")] = resource.MustParse(cpu)\n\treturn &v1.Node{\n\t\tTypeMeta: apis.TypeMeta{\n\t\t\tKind: \"Node\",\n\t\t\tAPIVersion: \"v1\",\n\t\t},\n\t\tObjectMeta: apis.ObjectMeta{\n\t\t\tName: nodeID,\n\t\t\tNamespace: \"default\",\n\t\t\tUID: \"uid_0001\",\n\t\t},\n\t\tSpec: v1.NodeSpec{},\n\t\tStatus: v1.NodeStatus{\n\t\t\tAllocatable: resourceList,\n\t\t},\n\t}\n}\n\n\/\/ merge two string maps\n\/\/ if the same key defined in the first and second maps\n\/\/ the value will be set by the second map\nfunc MergeMaps(first, second map[string]string) map[string]string {\n\tif first == nil && second == nil {\n\t\treturn nil\n\t}\n\tresult := make(map[string]string)\n\tfor k, v := range first {\n\t\tresult[k] = v\n\t}\n\tfor k, v := range second {\n\t\tresult[k] = v\n\t}\n\treturn result\n}\n","avg_line_length":27.2488687783,"max_line_length":111,"alphanum_fraction":0.7191962803} +{"size":3952,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright (c) 2015 Mattermost, Inc. 
All Rights Reserved.\n\/\/ See License.txt for license information.\n\npackage server\n\nimport (\n\t\"time\"\n\n\tfcm \"github.com\/appleboy\/go-fcm\"\n\t\"github.com\/kyokomi\/emoji\"\n)\n\ntype AndroidNotificationServer struct {\n\tAndroidPushSettings AndroidPushSettings\n\tmetrics *metrics\n\tlogger *Logger\n}\n\nfunc NewAndroidNotificationServer(settings AndroidPushSettings, logger *Logger, metrics *metrics) NotificationServer {\n\treturn &AndroidNotificationServer{\n\t\tAndroidPushSettings: settings,\n\t\tmetrics: metrics,\n\t\tlogger: logger,\n\t}\n}\n\nfunc (me *AndroidNotificationServer) Initialize() bool {\n\tme.logger.Infof(\"Initializing Android notification server for type=%v\", me.AndroidPushSettings.Type)\n\n\tif me.AndroidPushSettings.AndroidAPIKey == \"\" {\n\t\tme.logger.Error(\"Android push notifications not configured. Missing AndroidAPIKey.\")\n\t\treturn false\n\t}\n\n\treturn true\n}\n\nfunc (me *AndroidNotificationServer) SendNotification(msg *PushNotification) PushResponse {\n\tpushType := msg.Type\n\tdata := map[string]interface{}{\n\t\t\"ack_id\": msg.AckID,\n\t\t\"type\": pushType,\n\t\t\"badge\": msg.Badge,\n\t\t\"version\": msg.Version,\n\t\t\"channel_id\": msg.ChannelID,\n\t}\n\n\tif msg.IsIDLoaded {\n\t\tdata[\"post_id\"] = msg.PostID\n\t\tdata[\"message\"] = msg.Message\n\t\tdata[\"id_loaded\"] = true\n\t\tdata[\"sender_id\"] = msg.SenderID\n\t\tdata[\"sender_name\"] = \"Someone\"\n\t} else if pushType == PushTypeMessage || pushType == PushTypeSession {\n\t\tdata[\"team_id\"] = msg.TeamID\n\t\tdata[\"sender_id\"] = msg.SenderID\n\t\tdata[\"sender_name\"] = msg.SenderName\n\t\tdata[\"message\"] = emoji.Sprint(msg.Message)\n\t\tdata[\"channel_name\"] = msg.ChannelName\n\t\tdata[\"post_id\"] = msg.PostID\n\t\tdata[\"root_id\"] = msg.RootID\n\t\tdata[\"override_username\"] = msg.OverrideUsername\n\t\tdata[\"override_icon_url\"] = msg.OverrideIconURL\n\t\tdata[\"from_webhook\"] = msg.FromWebhook\n\t}\n\n\tif me.metrics != nil {\n\t\tme.metrics.incrementNotificationTotal(PushNotifyAndroid, pushType)\n\t}\n\tfcmMsg := &fcm.Message{\n\t\tTo: msg.DeviceID,\n\t\tData: data,\n\t\tPriority: \"high\",\n\t}\n\n\tif me.AndroidPushSettings.AndroidAPIKey != \"\" {\n\t\tsender, err := fcm.NewClient(me.AndroidPushSettings.AndroidAPIKey)\n\t\tif err != nil {\n\t\t\tif me.metrics != nil {\n\t\t\t\tme.metrics.incrementFailure(PushNotifyAndroid, pushType, \"invalid ApiKey\")\n\t\t\t}\n\t\t\treturn NewErrorPushResponse(err.Error())\n\t\t}\n\n\t\tme.logger.Infof(\"Sending android push notification for device=%v and type=%v\", me.AndroidPushSettings.Type, msg.Type)\n\n\t\tstart := time.Now()\n\t\tresp, err := sender.SendWithRetry(fcmMsg, 2)\n\t\tif me.metrics != nil {\n\t\t\tme.metrics.observerNotificationResponse(PushNotifyAndroid, time.Since(start).Seconds())\n\t\t}\n\n\t\tif err != nil {\n\t\t\tme.logger.Errorf(\"Failed to send FCM push sid=%v did=%v err=%v type=%v\", msg.ServerID, msg.DeviceID, err, me.AndroidPushSettings.Type)\n\t\t\tif me.metrics != nil {\n\t\t\t\tme.metrics.incrementFailure(PushNotifyAndroid, pushType, \"unknown transport error\")\n\t\t\t}\n\t\t\treturn NewErrorPushResponse(\"unknown transport error\")\n\t\t}\n\n\t\tif resp.Failure > 0 {\n\t\t\tfcmError := resp.Results[0].Error\n\n\t\t\tif fcmError == fcm.ErrInvalidRegistration || fcmError == fcm.ErrNotRegistered || fcmError == fcm.ErrMissingRegistration {\n\t\t\t\tme.logger.Infof(\"Android response failure sending remove code: %v type=%v\", resp, me.AndroidPushSettings.Type)\n\t\t\t\tif me.metrics != nil 
{\n\t\t\t\t\tme.metrics.incrementRemoval(PushNotifyAndroid, pushType, fcmError.Error())\n\t\t\t\t}\n\t\t\t\treturn NewRemovePushResponse()\n\t\t\t}\n\n\t\t\tme.logger.Errorf(\"Android response failure: %v type=%v\", resp, me.AndroidPushSettings.Type)\n\t\t\tif me.metrics != nil {\n\t\t\t\tme.metrics.incrementFailure(PushNotifyAndroid, pushType, fcmError.Error())\n\t\t\t}\n\t\t\treturn NewErrorPushResponse(fcmError.Error())\n\t\t}\n\t}\n\n\tif me.metrics != nil {\n\t\tif msg.AckID != \"\" {\n\t\t\tme.metrics.incrementSuccessWithAck(PushNotifyAndroid, pushType)\n\t\t} else {\n\t\t\tme.metrics.incrementSuccess(PushNotifyAndroid, pushType)\n\t\t}\n\t}\n\treturn NewOkPushResponse()\n}\n","avg_line_length":30.6356589147,"max_line_length":137,"alphanum_fraction":0.7097672065} +{"size":1430,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Hashed PROCesseS (hprocs)\npackage main\n\nimport (\n\tlog \"gopkg.in\/inconshreveable\/log15.v2\"\n\t\"hash\/fnv\"\n\t\"sync\"\n)\n\ntype ProcInfo interface {\n\tString() string\n\tFn(log.Logger, int) []ProcInfo\n}\n\ntype proc chan ProcInfo\n\ntype procs struct {\n\twg *sync.WaitGroup\n\tchans []proc\n}\n\n\/\/ 1) Pull off from corresponding channel\n\/\/ 2) Process request\n\/\/ 3) Remove from WaitGroup\nfunc (ps *procs) listen(l log.Logger, i int) {\n\tl = l.New(\"thd\", i)\n\tfor pi := range ps.chans[i] {\n\t\tps.spawnFill(pi.Fn(l, i))\n\t\tps.wg.Done()\n\t}\n}\n\n\/\/ 1) Add to WaitGroup\n\/\/ 2) Kick off process to fill channels\nfunc (ps *procs) spawnFill(pis []ProcInfo) {\n\tif len(pis) > 0 {\n\t\tps.wg.Add(len(pis))\n\t\tgo ps.fill(pis)\n\t}\n}\n\n\/\/ TODO: If we end up blocking on a channel\n\/\/ we can convert this to non-blocking by\n\/\/ utilizing a select and rotating through\n\/\/ the ProcInfo List to try other channels.\nfunc (ps *procs) fill(pis []ProcInfo) {\n\tfor len(pis) > 0 {\n\t\tps.chans[ChannelPicker(pis[0].String(), len(ps.chans))] <- pis[0]\n\t\tpis = pis[1:]\n\t}\n}\n\nfunc ChannelPicker(str string, num int) int {\n\th := fnv.New64()\n\th.Write([]byte(str))\n\treturn int(h.Sum64() % uint64(num))\n}\n\nfunc Run(l log.Logger, num int, pis []ProcInfo) {\n\tchans := make([]proc, num)\n\tvar wg sync.WaitGroup\n\tps := procs{wg: &wg, chans: chans}\n\tfor i := range chans {\n\t\tchans[i] = make(proc, num)\n\t\tgo ps.listen(l, i)\n\t}\n\tps.spawnFill(pis)\n\twg.Wait()\n\tfor _, ch := range chans {\n\t\tclose(ch)\n\t}\n}\n","avg_line_length":19.5890410959,"max_line_length":67,"alphanum_fraction":0.6468531469} +{"size":18303,"ext":"go","lang":"Go","max_stars_count":null,"content":"package evidence\n\nimport (\n\t\"bytes\"\n\t\"errors\"\n\t\"fmt\"\n\t\"sync\"\n\t\"sync\/atomic\"\n\t\"time\"\n\n\t\"github.com\/gogo\/protobuf\/proto\"\n\tgogotypes \"github.com\/gogo\/protobuf\/types\"\n\tdbm \"github.com\/tendermint\/tm-db\"\n\n\tclist \"github.com\/mihongtech\/tendermint\/libs\/clist\"\n\t\"github.com\/mihongtech\/tendermint\/libs\/log\"\n\ttmproto \"github.com\/mihongtech\/tendermint\/proto\/tendermint\/types\"\n\tsm \"github.com\/mihongtech\/tendermint\/state\"\n\t\"github.com\/mihongtech\/tendermint\/types\"\n)\n\nconst (\n\tbaseKeyCommitted = byte(0x00)\n\tbaseKeyPending = byte(0x01)\n)\n\n\/\/ Pool maintains a pool of valid evidence to be broadcasted and committed\ntype Pool struct {\n\tlogger log.Logger\n\n\tevidenceStore dbm.DB\n\tevidenceList *clist.CList \/\/ concurrent linked-list of evidence\n\tevidenceSize uint32 \/\/ amount of pending evidence\n\n\t\/\/ needed to load validators to verify evidence\n\tstateDB 
sm.Store\n\t\/\/ needed to load headers and commits to verify evidence\n\tblockStore BlockStore\n\n\tmtx sync.Mutex\n\t\/\/ latest state\n\tstate sm.State\n\t\/\/ evidence from consensus is buffered to this slice, awaiting until the next height\n\t\/\/ before being flushed to the pool. This prevents broadcasting and proposing of\n\t\/\/ evidence before the height with which the evidence happened is finished.\n\tconsensusBuffer []duplicateVoteSet\n\n\tpruningHeight int64\n\tpruningTime time.Time\n}\n\n\/\/ NewPool creates an evidence pool. If using an existing evidence store,\n\/\/ it will add all pending evidence to the concurrent list.\nfunc NewPool(evidenceDB dbm.DB, stateDB sm.Store, blockStore BlockStore) (*Pool, error) {\n\n\tstate, err := stateDB.Load()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"cannot load state: %w\", err)\n\t}\n\n\tpool := &Pool{\n\t\tstateDB: stateDB,\n\t\tblockStore: blockStore,\n\t\tstate: state,\n\t\tlogger: log.NewNopLogger(),\n\t\tevidenceStore: evidenceDB,\n\t\tevidenceList: clist.New(),\n\t\tconsensusBuffer: make([]duplicateVoteSet, 0),\n\t}\n\n\t\/\/ if pending evidence already in db, in event of prior failure, then check for expiration,\n\t\/\/ update the size and load it back to the evidenceList\n\tpool.pruningHeight, pool.pruningTime = pool.removeExpiredPendingEvidence()\n\tevList, _, err := pool.listEvidence(baseKeyPending, -1)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tatomic.StoreUint32(&pool.evidenceSize, uint32(len(evList)))\n\tfor _, ev := range evList {\n\t\tpool.evidenceList.PushBack(ev)\n\t}\n\n\treturn pool, nil\n}\n\n\/\/ PendingEvidence is used primarily as part of block proposal and returns up to maxNum of uncommitted evidence.\nfunc (evpool *Pool) PendingEvidence(maxBytes int64) ([]types.Evidence, int64) {\n\tif evpool.Size() == 0 {\n\t\treturn []types.Evidence{}, 0\n\t}\n\tevidence, size, err := evpool.listEvidence(baseKeyPending, maxBytes)\n\tif err != nil {\n\t\tevpool.logger.Error(\"Unable to retrieve pending evidence\", \"err\", err)\n\t}\n\treturn evidence, size\n}\n\n\/\/ Update takes both the new state and the evidence committed at that height and performs\n\/\/ the following operations:\n\/\/ 1. Take any conflicting votes from consensus and use the state's LastBlockTime to form\n\/\/ DuplicateVoteEvidence and add it to the pool.\n\/\/ 2. Update the pool's state which contains evidence params relating to expiry.\n\/\/ 3. Moves pending evidence that has now been committed into the committed pool.\n\/\/ 4. Removes any expired evidence based on both height and time.\nfunc (evpool *Pool) Update(state sm.State, ev types.EvidenceList) {\n\t\/\/ sanity check\n\tif state.LastBlockHeight <= evpool.state.LastBlockHeight {\n\t\tpanic(fmt.Sprintf(\n\t\t\t\"failed EvidencePool.Update new state height is less than or equal to previous state height: %d <= %d\",\n\t\t\tstate.LastBlockHeight,\n\t\t\tevpool.state.LastBlockHeight,\n\t\t))\n\t}\n\tevpool.logger.Debug(\"Updating evidence pool\", \"last_block_height\", state.LastBlockHeight,\n\t\t\"last_block_time\", state.LastBlockTime)\n\n\t\/\/ flush conflicting vote pairs from the buffer, producing DuplicateVoteEvidence and\n\t\/\/ adding it to the pool\n\tevpool.processConsensusBuffer(state)\n\t\/\/ update state\n\tevpool.updateState(state)\n\n\t\/\/ move committed evidence out from the pending pool and into the committed pool\n\tevpool.markEvidenceAsCommitted(ev)\n\n\t\/\/ prune pending evidence when it has expired. 
This also updates when the next evidence will expire\n\tif evpool.Size() > 0 && state.LastBlockHeight > evpool.pruningHeight &&\n\t\tstate.LastBlockTime.After(evpool.pruningTime) {\n\t\tevpool.pruningHeight, evpool.pruningTime = evpool.removeExpiredPendingEvidence()\n\t}\n}\n\n\/\/ AddEvidence checks the evidence is valid and adds it to the pool.\nfunc (evpool *Pool) AddEvidence(ev types.Evidence) error {\n\tevpool.logger.Debug(\"Attempting to add evidence\", \"ev\", ev)\n\n\t\/\/ We have already verified this piece of evidence - no need to do it again\n\tif evpool.isPending(ev) {\n\t\tevpool.logger.Debug(\"Evidence already pending, ignoring this one\", \"ev\", ev)\n\t\treturn nil\n\t}\n\n\t\/\/ check that the evidence isn't already committed\n\tif evpool.isCommitted(ev) {\n\t\t\/\/ this can happen if the peer that sent us the evidence is behind so we shouldn't\n\t\t\/\/ punish the peer.\n\t\tevpool.logger.Debug(\"Evidence was already committed, ignoring this one\", \"ev\", ev)\n\t\treturn nil\n\t}\n\n\t\/\/ 1) Verify against state.\n\terr := evpool.verify(ev)\n\tif err != nil {\n\t\treturn types.NewErrInvalidEvidence(ev, err)\n\t}\n\n\t\/\/ 2) Save to store.\n\tif err := evpool.addPendingEvidence(ev); err != nil {\n\t\treturn fmt.Errorf(\"can't add evidence to pending list: %w\", err)\n\t}\n\n\t\/\/ 3) Add evidence to clist.\n\tevpool.evidenceList.PushBack(ev)\n\n\tevpool.logger.Info(\"Verified new evidence of byzantine behavior\", \"evidence\", ev)\n\n\treturn nil\n}\n\n\/\/ ReportConflictingVotes takes two conflicting votes and forms duplicate vote evidence,\n\/\/ adding it eventually to the evidence pool.\n\/\/\n\/\/ Duplicate vote attacks happen before the block is committed and the timestamp is\n\/\/ finalized, thus the evidence pool holds these votes in a buffer, forming the\n\/\/ evidence from them once consensus at that height has been reached and `Update()` with\n\/\/ the new state called.\n\/\/\n\/\/ Votes are not verified.\nfunc (evpool *Pool) ReportConflictingVotes(voteA, voteB *types.Vote) {\n\tevpool.mtx.Lock()\n\tdefer evpool.mtx.Unlock()\n\tevpool.consensusBuffer = append(evpool.consensusBuffer, duplicateVoteSet{\n\t\tVoteA: voteA,\n\t\tVoteB: voteB,\n\t})\n}\n\n\/\/ CheckEvidence takes an array of evidence from a block and verifies all the evidence there.\n\/\/ If it has already verified the evidence then it jumps to the next one. It ensures that no\n\/\/ evidence has already been committed or is being proposed twice. 
It also adds any\n\/\/ evidence that it doesn't currently have so that it can quickly form ABCI Evidence later.\nfunc (evpool *Pool) CheckEvidence(evList types.EvidenceList) error {\n\thashes := make([][]byte, len(evList))\n\tfor idx, ev := range evList {\n\n\t\t_, isLightEv := ev.(*types.LightClientAttackEvidence)\n\n\t\t\/\/ We must verify light client attack evidence regardless because there could be a\n\t\t\/\/ different conflicting block with the same hash.\n\t\tif isLightEv || !evpool.isPending(ev) {\n\t\t\t\/\/ check that the evidence isn't already committed\n\t\t\tif evpool.isCommitted(ev) {\n\t\t\t\treturn &types.ErrInvalidEvidence{Evidence: ev, Reason: errors.New(\"evidence was already committed\")}\n\t\t\t}\n\n\t\t\terr := evpool.verify(ev)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif err := evpool.addPendingEvidence(ev); err != nil {\n\t\t\t\t\/\/ Something went wrong with adding the evidence but we already know it is valid\n\t\t\t\t\/\/ hence we log an error and continue\n\t\t\t\tevpool.logger.Error(\"Can't add evidence to pending list\", \"err\", err, \"ev\", ev)\n\t\t\t}\n\n\t\t\tevpool.logger.Info(\"Check evidence: verified evidence of byzantine behavior\", \"evidence\", ev)\n\t\t}\n\n\t\t\/\/ check for duplicate evidence. We cache hashes so we don't have to work them out again.\n\t\thashes[idx] = ev.Hash()\n\t\tfor i := idx - 1; i >= 0; i-- {\n\t\t\tif bytes.Equal(hashes[i], hashes[idx]) {\n\t\t\t\treturn &types.ErrInvalidEvidence{Evidence: ev, Reason: errors.New(\"duplicate evidence\")}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\n\/\/ EvidenceFront goes to the first evidence in the clist\nfunc (evpool *Pool) EvidenceFront() *clist.CElement {\n\treturn evpool.evidenceList.Front()\n}\n\n\/\/ EvidenceWaitChan is a channel that closes once the first evidence in the list is there. i.e Front is not nil\nfunc (evpool *Pool) EvidenceWaitChan() <-chan struct{} {\n\treturn evpool.evidenceList.WaitChan()\n}\n\n\/\/ SetLogger sets the Logger.\nfunc (evpool *Pool) SetLogger(l log.Logger) {\n\tevpool.logger = l\n}\n\n\/\/ Size returns the number of evidence in the pool.\nfunc (evpool *Pool) Size() uint32 {\n\treturn atomic.LoadUint32(&evpool.evidenceSize)\n}\n\n\/\/ State returns the current state of the evpool.\nfunc (evpool *Pool) State() sm.State {\n\tevpool.mtx.Lock()\n\tdefer evpool.mtx.Unlock()\n\treturn evpool.state\n}\n\n\/\/ IsExpired checks whether evidence or a polc is expired by checking whether a height and time is older\n\/\/ than set by the evidence consensus parameters\nfunc (evpool *Pool) isExpired(height int64, time time.Time) bool {\n\tvar (\n\t\tparams = evpool.State().ConsensusParams.Evidence\n\t\tageDuration = evpool.State().LastBlockTime.Sub(time)\n\t\tageNumBlocks = evpool.State().LastBlockHeight - height\n\t)\n\treturn ageNumBlocks > params.MaxAgeNumBlocks &&\n\t\tageDuration > params.MaxAgeDuration\n}\n\n\/\/ IsCommitted returns true if we have already seen this exact evidence and it is already marked as committed.\nfunc (evpool *Pool) isCommitted(evidence types.Evidence) bool {\n\tkey := keyCommitted(evidence)\n\tok, err := evpool.evidenceStore.Has(key)\n\tif err != nil {\n\t\tevpool.logger.Error(\"Unable to find committed evidence\", \"err\", err)\n\t}\n\treturn ok\n}\n\n\/\/ IsPending checks whether the evidence is already pending. 
DB errors are passed to the logger.\nfunc (evpool *Pool) isPending(evidence types.Evidence) bool {\n\tkey := keyPending(evidence)\n\tok, err := evpool.evidenceStore.Has(key)\n\tif err != nil {\n\t\tevpool.logger.Error(\"Unable to find pending evidence\", \"err\", err)\n\t}\n\treturn ok\n}\n\nfunc (evpool *Pool) addPendingEvidence(ev types.Evidence) error {\n\tevpb, err := types.EvidenceToProto(ev)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to convert to proto, err: %w\", err)\n\t}\n\n\tevBytes, err := evpb.Marshal()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to marshal evidence: %w\", err)\n\t}\n\n\tkey := keyPending(ev)\n\n\terr = evpool.evidenceStore.Set(key, evBytes)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"can't persist evidence: %w\", err)\n\t}\n\tatomic.AddUint32(&evpool.evidenceSize, 1)\n\treturn nil\n}\n\nfunc (evpool *Pool) removePendingEvidence(evidence types.Evidence) {\n\tkey := keyPending(evidence)\n\tif err := evpool.evidenceStore.Delete(key); err != nil {\n\t\tevpool.logger.Error(\"Unable to delete pending evidence\", \"err\", err)\n\t} else {\n\t\tatomic.AddUint32(&evpool.evidenceSize, ^uint32(0))\n\t\tevpool.logger.Debug(\"Deleted pending evidence\", \"evidence\", evidence)\n\t}\n}\n\n\/\/ markEvidenceAsCommitted processes all the evidence in the block, marking it as\n\/\/ committed and removing it from the pending database.\nfunc (evpool *Pool) markEvidenceAsCommitted(evidence types.EvidenceList) {\n\tblockEvidenceMap := make(map[string]struct{}, len(evidence))\n\tfor _, ev := range evidence {\n\t\tif evpool.isPending(ev) {\n\t\t\tevpool.removePendingEvidence(ev)\n\t\t\tblockEvidenceMap[evMapKey(ev)] = struct{}{}\n\t\t}\n\n\t\t\/\/ Add evidence to the committed list. As the evidence is stored in the block store\n\t\t\/\/ we only need to record the height that it was saved at.\n\t\tkey := keyCommitted(ev)\n\n\t\th := gogotypes.Int64Value{Value: ev.Height()}\n\t\tevBytes, err := proto.Marshal(&h)\n\t\tif err != nil {\n\t\t\tevpool.logger.Error(\"failed to marshal committed evidence\", \"err\", err, \"key(height\/hash)\", key)\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := evpool.evidenceStore.Set(key, evBytes); err != nil {\n\t\t\tevpool.logger.Error(\"Unable to save committed evidence\", \"err\", err, \"key(height\/hash)\", key)\n\t\t}\n\t}\n\n\t\/\/ remove committed evidence from the clist\n\tif len(blockEvidenceMap) != 0 {\n\t\tevpool.removeEvidenceFromList(blockEvidenceMap)\n\t}\n}\n\n\/\/ listEvidence retrieves lists evidence from oldest to newest within maxBytes.\n\/\/ If maxBytes is -1, there's no cap on the size of returned evidence.\nfunc (evpool *Pool) listEvidence(prefixKey byte, maxBytes int64) ([]types.Evidence, int64, error) {\n\tvar (\n\t\tevSize int64\n\t\ttotalSize int64\n\t\tevidence []types.Evidence\n\t\tevList tmproto.EvidenceList \/\/ used for calculating the bytes size\n\t)\n\n\titer, err := dbm.IteratePrefix(evpool.evidenceStore, []byte{prefixKey})\n\tif err != nil {\n\t\treturn nil, totalSize, fmt.Errorf(\"database error: %v\", err)\n\t}\n\tdefer iter.Close()\n\tfor ; iter.Valid(); iter.Next() {\n\t\tvar evpb tmproto.Evidence\n\t\terr := evpb.Unmarshal(iter.Value())\n\t\tif err != nil {\n\t\t\treturn evidence, totalSize, err\n\t\t}\n\t\tevList.Evidence = append(evList.Evidence, evpb)\n\t\tevSize = int64(evList.Size())\n\t\tif maxBytes != -1 && evSize > maxBytes {\n\t\t\tif err := iter.Error(); err != nil {\n\t\t\t\treturn evidence, totalSize, err\n\t\t\t}\n\t\t\treturn evidence, totalSize, nil\n\t\t}\n\n\t\tev, err := 
types.EvidenceFromProto(&evpb)\n\t\tif err != nil {\n\t\t\treturn nil, totalSize, err\n\t\t}\n\n\t\ttotalSize = evSize\n\t\tevidence = append(evidence, ev)\n\t}\n\n\tif err := iter.Error(); err != nil {\n\t\treturn evidence, totalSize, err\n\t}\n\treturn evidence, totalSize, nil\n}\n\nfunc (evpool *Pool) removeExpiredPendingEvidence() (int64, time.Time) {\n\titer, err := dbm.IteratePrefix(evpool.evidenceStore, []byte{baseKeyPending})\n\tif err != nil {\n\t\tevpool.logger.Error(\"Unable to iterate over pending evidence\", \"err\", err)\n\t\treturn evpool.State().LastBlockHeight, evpool.State().LastBlockTime\n\t}\n\tdefer iter.Close()\n\tblockEvidenceMap := make(map[string]struct{})\n\tfor ; iter.Valid(); iter.Next() {\n\t\tev, err := bytesToEv(iter.Value())\n\t\tif err != nil {\n\t\t\tevpool.logger.Error(\"Error in transition evidence from protobuf\", \"err\", err)\n\t\t\tcontinue\n\t\t}\n\t\tif !evpool.isExpired(ev.Height(), ev.Time()) {\n\t\t\tif len(blockEvidenceMap) != 0 {\n\t\t\t\tevpool.removeEvidenceFromList(blockEvidenceMap)\n\t\t\t}\n\n\t\t\t\/\/ return the height and time with which this evidence will have expired so we know when to prune next\n\t\t\treturn ev.Height() + evpool.State().ConsensusParams.Evidence.MaxAgeNumBlocks + 1,\n\t\t\t\tev.Time().Add(evpool.State().ConsensusParams.Evidence.MaxAgeDuration).Add(time.Second)\n\t\t}\n\t\tevpool.removePendingEvidence(ev)\n\t\tblockEvidenceMap[evMapKey(ev)] = struct{}{}\n\t}\n\t\/\/ We either have no pending evidence or all evidence has expired\n\tif len(blockEvidenceMap) != 0 {\n\t\tevpool.removeEvidenceFromList(blockEvidenceMap)\n\t}\n\treturn evpool.State().LastBlockHeight, evpool.State().LastBlockTime\n}\n\nfunc (evpool *Pool) removeEvidenceFromList(\n\tblockEvidenceMap map[string]struct{}) {\n\n\tfor e := evpool.evidenceList.Front(); e != nil; e = e.Next() {\n\t\t\/\/ Remove from clist\n\t\tev := e.Value.(types.Evidence)\n\t\tif _, ok := blockEvidenceMap[evMapKey(ev)]; ok {\n\t\t\tevpool.evidenceList.Remove(e)\n\t\t\te.DetachPrev()\n\t\t}\n\t}\n}\n\nfunc (evpool *Pool) updateState(state sm.State) {\n\tevpool.mtx.Lock()\n\tdefer evpool.mtx.Unlock()\n\tevpool.state = state\n}\n\n\/\/ processConsensusBuffer converts all the duplicate votes witnessed from consensus\n\/\/ into DuplicateVoteEvidence. 
It sets the evidence timestamp to the block height\n\/\/ from the most recently committed block.\n\/\/ Evidence is then added to the pool so as to be ready to be broadcasted and proposed.\nfunc (evpool *Pool) processConsensusBuffer(state sm.State) {\n\tevpool.mtx.Lock()\n\tdefer evpool.mtx.Unlock()\n\tfor _, voteSet := range evpool.consensusBuffer {\n\n\t\t\/\/ Check the height of the conflicting votes and fetch the corresponding time and validator set\n\t\t\/\/ to produce the valid evidence\n\t\tvar dve *types.DuplicateVoteEvidence\n\t\tswitch {\n\t\tcase voteSet.VoteA.Height == state.LastBlockHeight:\n\t\t\tdve = types.NewDuplicateVoteEvidence(\n\t\t\t\tvoteSet.VoteA,\n\t\t\t\tvoteSet.VoteB,\n\t\t\t\tstate.LastBlockTime,\n\t\t\t\tstate.LastValidators,\n\t\t\t)\n\n\t\tcase voteSet.VoteA.Height < state.LastBlockHeight:\n\t\t\tvalSet, err := evpool.stateDB.LoadValidators(voteSet.VoteA.Height)\n\t\t\tif err != nil {\n\t\t\t\tevpool.logger.Error(\"failed to load validator set for conflicting votes\", \"height\",\n\t\t\t\t\tvoteSet.VoteA.Height, \"err\", err,\n\t\t\t\t)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tblockMeta := evpool.blockStore.LoadBlockMeta(voteSet.VoteA.Height)\n\t\t\tif blockMeta == nil {\n\t\t\t\tevpool.logger.Error(\"failed to load block time for conflicting votes\", \"height\", voteSet.VoteA.Height)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdve = types.NewDuplicateVoteEvidence(\n\t\t\t\tvoteSet.VoteA,\n\t\t\t\tvoteSet.VoteB,\n\t\t\t\tblockMeta.Header.Time,\n\t\t\t\tvalSet,\n\t\t\t)\n\n\t\tdefault:\n\t\t\t\/\/ evidence pool shouldn't expect to get votes from consensus of a height that is above the current\n\t\t\t\/\/ state. If this error is seen then perhaps consider keeping the votes in the buffer and retry\n\t\t\t\/\/ in following heights\n\t\t\tevpool.logger.Error(\"inbound duplicate votes from consensus are of a greater height than current state\",\n\t\t\t\t\"duplicate vote height\", voteSet.VoteA.Height,\n\t\t\t\t\"state.LastBlockHeight\", state.LastBlockHeight)\n\t\t\tcontinue\n\t\t}\n\n\t\t\/\/ check if we already have this evidence\n\t\tif evpool.isPending(dve) {\n\t\t\tevpool.logger.Debug(\"evidence already pending; ignoring\", \"evidence\", dve)\n\t\t\tcontinue\n\t\t}\n\n\t\t\/\/ check that the evidence is not already committed on chain\n\t\tif evpool.isCommitted(dve) {\n\t\t\tevpool.logger.Debug(\"evidence already committed; ignoring\", \"evidence\", dve)\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := evpool.addPendingEvidence(dve); err != nil {\n\t\t\tevpool.logger.Error(\"failed to flush evidence from consensus buffer to pending list: %w\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\tevpool.evidenceList.PushBack(dve)\n\n\t\tevpool.logger.Info(\"verified new evidence of byzantine behavior\", \"evidence\", dve)\n\t}\n\t\/\/ reset consensus buffer\n\tevpool.consensusBuffer = make([]duplicateVoteSet, 0)\n}\n\ntype duplicateVoteSet struct {\n\tVoteA *types.Vote\n\tVoteB *types.Vote\n}\n\nfunc bytesToEv(evBytes []byte) (types.Evidence, error) {\n\tvar evpb tmproto.Evidence\n\terr := evpb.Unmarshal(evBytes)\n\tif err != nil {\n\t\treturn &types.DuplicateVoteEvidence{}, err\n\t}\n\n\treturn types.EvidenceFromProto(&evpb)\n}\n\nfunc evMapKey(ev types.Evidence) string {\n\treturn string(ev.Hash())\n}\n\n\/\/ big endian padded hex\nfunc bE(h int64) string {\n\treturn fmt.Sprintf(\"%0.16X\", h)\n}\n\nfunc keyCommitted(evidence types.Evidence) []byte {\n\treturn append([]byte{baseKeyCommitted}, keySuffix(evidence)...)\n}\n\nfunc keyPending(evidence types.Evidence) []byte {\n\treturn 
append([]byte{baseKeyPending}, keySuffix(evidence)...)\n}\n\nfunc keySuffix(evidence types.Evidence) []byte {\n\treturn []byte(fmt.Sprintf(\"%s\/%X\", bE(evidence.Height()), evidence.Hash()))\n}\n","avg_line_length":32.5676156584,"max_line_length":112,"alphanum_fraction":0.7248538491} +{"size":1543,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"\/*\nCopyright \u00a9 2019 NAME HERE \n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\npackage cmd\n\nimport (\n\t\"github.com\/spf13\/cobra\"\n\tdriveWrapper \"main\/drive\"\n)\n\n\/\/ removeCmd represents the remove command\nvar removeCmd = &cobra.Command{\n\tUse: \"remove\",\n\tShort: \"Removes a key-value pair\",\n\tLong: `Sick of all these key-value pairs? Give us the key, and we'll nuke it, so you\ncan get back binging that new season of Downton Abbey.\n\nSpoiler alert: Robert dies`,\n\tArgs: cobra.ExactArgs(1),\n\tRun: func(cmd *cobra.Command, args []string) {\n\t\tdriveWrapper.RemoveKeyFromFile(args[0], verbose)\n\t},\n}\n\nfunc init() {\n\trootCmd.AddCommand(removeCmd)\n\n\t\/\/ Here you will define your flags and configuration settings.\n\n\t\/\/ Cobra supports Persistent Flags which will work for this command\n\t\/\/ and all subcommands, e.g.:\n\t\/\/ removeCmd.PersistentFlags().String(\"foo\", \"\", \"A help for foo\")\n\n\t\/\/ Cobra supports local flags which will only run when this command\n\t\/\/ is called directly, e.g.:\n\t\/\/ removeCmd.Flags().BoolP(\"toggle\", \"t\", false, \"Help message for toggle\")\n}\n","avg_line_length":30.86,"max_line_length":85,"alphanum_fraction":0.7401166559} +{"size":6609,"ext":"go","lang":"Go","max_stars_count":46.0,"content":"package model\n\nimport (\n\t\"encoding\/json\"\n\n\t\"errors\"\n\t\"github.com\/huaweicloud\/huaweicloud-sdk-go-v3\/core\/converter\"\n\n\t\"strings\"\n)\n\n\/\/ \u5b50\u4efb\u52a1\u4fe1\u606f\u4f53\ntype ChildrenJobInfo struct {\n\t\/\/ \u8ba1\u8d39\u5b57\u6bb5\n\n\tBillingTag string `json:\"billing_tag\"`\n\t\/\/ \u4efb\u52a1\u521b\u5efa\u65f6\u95f4\n\n\tCreateTime string `json:\"create_time\"`\n\t\/\/ \u590d\u5236\u573a\u666f\n\n\tDbUseType ChildrenJobInfoDbUseType `json:\"db_use_type\"`\n\t\/\/ \u4efb\u52a1\u63cf\u8ff0\n\n\tDescription string `json:\"description\"`\n\t\/\/ \u5f15\u64ce\u7c7b\u578b\n\n\tEngineType ChildrenJobInfoEngineType `json:\"engine_type\"`\n\t\/\/ \u4efb\u52a1\u5931\u8d25\u539f\u56e0\n\n\tErrorMsg string `json:\"error_msg\"`\n\t\/\/ \u4efb\u52a1id\n\n\tId string `json:\"id\"`\n\t\/\/ \u8fc1\u79fb\u65b9\u5411\n\n\tJobDirection ChildrenJobInfoJobDirection `json:\"job_direction\"`\n\t\/\/ \u4efb\u52a1\u540d\u79f0\n\n\tName string `json:\"name\"`\n\t\/\/ \u7f51\u7edc\u7c7b\u578b\n\n\tNetType ChildrenJobInfoNetType `json:\"net_type\"`\n\t\/\/ \u65b0\u6846\u67b6\n\n\tNodeNewFramework string `json:\"node_newFramework\"`\n\t\/\/ \u4efb\u52a1\u72b6\u6001\n\n\tStatus string `json:\"status\"`\n\t\/\/ \u8fc1\u79fb\u6a21\u5f0f\n\n\tTaskType ChildrenJobInfoTaskType `json:\"task_type\"`\n}\n\nfunc (o ChildrenJobInfo) String() string {\n\tdata, err := 
json.Marshal(o)\n\tif err != nil {\n\t\treturn \"ChildrenJobInfo struct{}\"\n\t}\n\n\treturn strings.Join([]string{\"ChildrenJobInfo\", string(data)}, \" \")\n}\n\ntype ChildrenJobInfoDbUseType struct {\n\tvalue string\n}\n\ntype ChildrenJobInfoDbUseTypeEnum struct {\n\tMIGRATION ChildrenJobInfoDbUseType\n\tSYNC ChildrenJobInfoDbUseType\n\tCLOUD_DATA_GUARD ChildrenJobInfoDbUseType\n}\n\nfunc GetChildrenJobInfoDbUseTypeEnum() ChildrenJobInfoDbUseTypeEnum {\n\treturn ChildrenJobInfoDbUseTypeEnum{\n\t\tMIGRATION: ChildrenJobInfoDbUseType{\n\t\t\tvalue: \"migration:\u5b9e\u65f6\u8fc1\u79fb\",\n\t\t},\n\t\tSYNC: ChildrenJobInfoDbUseType{\n\t\t\tvalue: \"sync:\u5b9e\u65f6\u540c\u6b65\",\n\t\t},\n\t\tCLOUD_DATA_GUARD: ChildrenJobInfoDbUseType{\n\t\t\tvalue: \"cloudDataGuard:\u5b9e\u65f6\u707e\u5907\",\n\t\t},\n\t}\n}\n\nfunc (c ChildrenJobInfoDbUseType) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(c.value)\n}\n\nfunc (c *ChildrenJobInfoDbUseType) UnmarshalJSON(b []byte) error {\n\tmyConverter := converter.StringConverterFactory(\"string\")\n\tif myConverter != nil {\n\t\tval, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), \"\\\"\"))\n\t\tif err == nil {\n\t\t\tc.value = val.(string)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else {\n\t\treturn errors.New(\"convert enum data to string error\")\n\t}\n}\n\ntype ChildrenJobInfoEngineType struct {\n\tvalue string\n}\n\ntype ChildrenJobInfoEngineTypeEnum struct {\n\tCLOUD_DATA_GUARD_CASSANDRA ChildrenJobInfoEngineType\n\tCLOUD_DATA_GUARD_DDM ChildrenJobInfoEngineType\n\tCLOUD_DATA_GUARD_TAURUS_TO_MYSQL ChildrenJobInfoEngineType\n\tCLOUD_DATA_GUARD_MYSQL ChildrenJobInfoEngineType\n\tCLOUD_DATA_GUARD_MYSQL_TO_TAURUS ChildrenJobInfoEngineType\n}\n\nfunc GetChildrenJobInfoEngineTypeEnum() ChildrenJobInfoEngineTypeEnum {\n\treturn ChildrenJobInfoEngineTypeEnum{\n\t\tCLOUD_DATA_GUARD_CASSANDRA: ChildrenJobInfoEngineType{\n\t\t\tvalue: \"cloudDataGuard-cassandra\",\n\t\t},\n\t\tCLOUD_DATA_GUARD_DDM: ChildrenJobInfoEngineType{\n\t\t\tvalue: \"cloudDataGuard-ddm\",\n\t\t},\n\t\tCLOUD_DATA_GUARD_TAURUS_TO_MYSQL: ChildrenJobInfoEngineType{\n\t\t\tvalue: \"cloudDataGuard-taurus-to-mysql\",\n\t\t},\n\t\tCLOUD_DATA_GUARD_MYSQL: ChildrenJobInfoEngineType{\n\t\t\tvalue: \"cloudDataGuard-mysql\",\n\t\t},\n\t\tCLOUD_DATA_GUARD_MYSQL_TO_TAURUS: ChildrenJobInfoEngineType{\n\t\t\tvalue: \"cloudDataGuard-mysql-to-taurus\",\n\t\t},\n\t}\n}\n\nfunc (c ChildrenJobInfoEngineType) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(c.value)\n}\n\nfunc (c *ChildrenJobInfoEngineType) UnmarshalJSON(b []byte) error {\n\tmyConverter := converter.StringConverterFactory(\"string\")\n\tif myConverter != nil {\n\t\tval, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), \"\\\"\"))\n\t\tif err == nil {\n\t\t\tc.value = val.(string)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else {\n\t\treturn errors.New(\"convert enum data to string error\")\n\t}\n}\n\ntype ChildrenJobInfoJobDirection struct {\n\tvalue string\n}\n\ntype ChildrenJobInfoJobDirectionEnum struct {\n\tUP ChildrenJobInfoJobDirection\n\tDOWN ChildrenJobInfoJobDirection\n\tNO_DBS ChildrenJobInfoJobDirection\n}\n\nfunc GetChildrenJobInfoJobDirectionEnum() ChildrenJobInfoJobDirectionEnum {\n\treturn ChildrenJobInfoJobDirectionEnum{\n\t\tUP: ChildrenJobInfoJobDirection{\n\t\t\tvalue: \"up\",\n\t\t},\n\t\tDOWN: ChildrenJobInfoJobDirection{\n\t\t\tvalue: \"down\",\n\t\t},\n\t\tNO_DBS: ChildrenJobInfoJobDirection{\n\t\t\tvalue: \"no-dbs\",\n\t\t},\n\t}\n}\n\nfunc (c 
ChildrenJobInfoJobDirection) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(c.value)\n}\n\nfunc (c *ChildrenJobInfoJobDirection) UnmarshalJSON(b []byte) error {\n\tmyConverter := converter.StringConverterFactory(\"string\")\n\tif myConverter != nil {\n\t\tval, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), \"\\\"\"))\n\t\tif err == nil {\n\t\t\tc.value = val.(string)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else {\n\t\treturn errors.New(\"convert enum data to string error\")\n\t}\n}\n\ntype ChildrenJobInfoNetType struct {\n\tvalue string\n}\n\ntype ChildrenJobInfoNetTypeEnum struct {\n\tVPC ChildrenJobInfoNetType\n\tVPN ChildrenJobInfoNetType\n\tEIP ChildrenJobInfoNetType\n}\n\nfunc GetChildrenJobInfoNetTypeEnum() ChildrenJobInfoNetTypeEnum {\n\treturn ChildrenJobInfoNetTypeEnum{\n\t\tVPC: ChildrenJobInfoNetType{\n\t\t\tvalue: \"vpc\",\n\t\t},\n\t\tVPN: ChildrenJobInfoNetType{\n\t\t\tvalue: \"vpn\",\n\t\t},\n\t\tEIP: ChildrenJobInfoNetType{\n\t\t\tvalue: \"eip\",\n\t\t},\n\t}\n}\n\nfunc (c ChildrenJobInfoNetType) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(c.value)\n}\n\nfunc (c *ChildrenJobInfoNetType) UnmarshalJSON(b []byte) error {\n\tmyConverter := converter.StringConverterFactory(\"string\")\n\tif myConverter != nil {\n\t\tval, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), \"\\\"\"))\n\t\tif err == nil {\n\t\t\tc.value = val.(string)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else {\n\t\treturn errors.New(\"convert enum data to string error\")\n\t}\n}\n\ntype ChildrenJobInfoTaskType struct {\n\tvalue string\n}\n\ntype ChildrenJobInfoTaskTypeEnum struct {\n\tFULL_TRANS ChildrenJobInfoTaskType\n\tFULL_INCR_TRANS ChildrenJobInfoTaskType\n\tINCR_TRANS ChildrenJobInfoTaskType\n}\n\nfunc GetChildrenJobInfoTaskTypeEnum() ChildrenJobInfoTaskTypeEnum {\n\treturn ChildrenJobInfoTaskTypeEnum{\n\t\tFULL_TRANS: ChildrenJobInfoTaskType{\n\t\t\tvalue: \"FULL_TRANS \u5168\u91cf\",\n\t\t},\n\t\tFULL_INCR_TRANS: ChildrenJobInfoTaskType{\n\t\t\tvalue: \"FULL_INCR_TRANS \u5168\u91cf+\u589e\u91cf\",\n\t\t},\n\t\tINCR_TRANS: ChildrenJobInfoTaskType{\n\t\t\tvalue: \"INCR_TRANS \u589e\u91cf\",\n\t\t},\n\t}\n}\n\nfunc (c ChildrenJobInfoTaskType) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(c.value)\n}\n\nfunc (c *ChildrenJobInfoTaskType) UnmarshalJSON(b []byte) error {\n\tmyConverter := converter.StringConverterFactory(\"string\")\n\tif myConverter != nil {\n\t\tval, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), \"\\\"\"))\n\t\tif err == nil {\n\t\t\tc.value = val.(string)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t} else {\n\t\treturn errors.New(\"convert enum data to string error\")\n\t}\n}\n","avg_line_length":23.5195729537,"max_line_length":83,"alphanum_fraction":0.742623695} +{"size":1451,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright (c) 2018 Uber Technologies, Inc.\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy\n\/\/ of this software and associated documentation files (the \"Software\"), to deal\n\/\/ in the Software without restriction, including without limitation the rights\n\/\/ to use, copy, modify, merge, publish, distribute, sublicense, and\/or sell\n\/\/ copies of the Software, and to permit persons to whom the Software is\n\/\/ furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in\n\/\/ all copies or substantial portions of the 
Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\/\/ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\/\/ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\/\/ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\/\/ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\/\/ THE SOFTWARE.\n\npackage semver\n\n\/\/ Range is the range of version numbers that lie in [Begin, End).\ntype Range struct {\n\tBegin Version\n\tEnd Version\n}\n\n\/\/ Contains returns true if the given semver version number is in this range.\nfunc (r *Range) Contains(other Version) bool {\n\treturn other.Compare(&r.Begin) >= 0 && other.Compare(&r.End) < 0\n}\n","avg_line_length":43.9696969697,"max_line_length":80,"alphanum_fraction":0.7539627843} +{"size":4147,"ext":"go","lang":"Go","max_stars_count":410.0,"content":"package compose\n\nimport (\n\t\"encoding\/csv\"\n\t\"fmt\"\n\t\"regexp\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com\/docker\/docker\/api\/types\/swarm\"\n\t\"github.com\/docker\/go-connections\/nat\"\n)\n\nconst (\n\tportOptTargetPort = \"target\"\n\tportOptPublishedPort = \"published\"\n\tportOptProtocol = \"protocol\"\n\tportOptMode = \"mode\"\n)\n\n\/\/ PortOpt represents a port config in swarm mode.\ntype PortOpt struct {\n\tports []swarm.PortConfig\n}\n\n\/\/ Set a new port value\n\/\/ nolint: gocyclo\nfunc (p *PortOpt) Set(value string) error {\n\tlongSyntax, err := regexp.MatchString(`\\w+=\\w+(,\\w+=\\w+)*`, value)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif longSyntax {\n\t\tcsvReader := csv.NewReader(strings.NewReader(value))\n\t\tfields, err := csvReader.Read()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpConfig := swarm.PortConfig{}\n\t\tfor _, field := range fields {\n\t\t\tparts := strings.SplitN(field, \"=\", 2)\n\t\t\tif len(parts) != 2 {\n\t\t\t\treturn fmt.Errorf(\"invalid field %s\", field)\n\t\t\t}\n\n\t\t\tkey := strings.ToLower(parts[0])\n\t\t\tvalue := strings.ToLower(parts[1])\n\n\t\t\tswitch key {\n\t\t\tcase portOptProtocol:\n\t\t\t\tif value != string(swarm.PortConfigProtocolTCP) && value != string(swarm.PortConfigProtocolUDP) {\n\t\t\t\t\treturn fmt.Errorf(\"invalid protocol value %s\", value)\n\t\t\t\t}\n\n\t\t\t\tpConfig.Protocol = swarm.PortConfigProtocol(value)\n\t\t\tcase portOptMode:\n\t\t\t\tif value != string(swarm.PortConfigPublishModeIngress) && value != string(swarm.PortConfigPublishModeHost) {\n\t\t\t\t\treturn fmt.Errorf(\"invalid publish mode value %s\", value)\n\t\t\t\t}\n\n\t\t\t\tpConfig.PublishMode = swarm.PortConfigPublishMode(value)\n\t\t\tcase portOptTargetPort:\n\t\t\t\ttPort, err := strconv.ParseUint(value, 10, 16)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tpConfig.TargetPort = uint32(tPort)\n\t\t\tcase portOptPublishedPort:\n\t\t\t\tpPort, err := strconv.ParseUint(value, 10, 16)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tpConfig.PublishedPort = uint32(pPort)\n\t\t\tdefault:\n\t\t\t\treturn fmt.Errorf(\"invalid field key %s\", key)\n\t\t\t}\n\t\t}\n\n\t\tif pConfig.TargetPort == 0 {\n\t\t\treturn fmt.Errorf(\"missing mandatory field %q\", portOptTargetPort)\n\t\t}\n\n\t\tif pConfig.PublishMode == \"\" {\n\t\t\tpConfig.PublishMode = swarm.PortConfigPublishModeIngress\n\t\t}\n\n\t\tif pConfig.Protocol == \"\" {\n\t\t\tpConfig.Protocol = 
swarm.PortConfigProtocolTCP\n\t\t}\n\n\t\tp.ports = append(p.ports, pConfig)\n\t} else {\n\t\t\/\/ short syntax\n\t\tportConfigs := []swarm.PortConfig{}\n\t\tports, portBindingMap, err := nat.ParsePortSpecs([]string{value})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, portBindings := range portBindingMap {\n\t\t\tfor _, portBinding := range portBindings {\n\t\t\t\tif portBinding.HostIP != \"\" {\n\t\t\t\t\treturn fmt.Errorf(\"hostip is not supported\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor port := range ports {\n\t\t\tportConfig, err := ConvertPortToPortConfig(port, portBindingMap)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tportConfigs = append(portConfigs, portConfig...)\n\t\t}\n\t\tp.ports = append(p.ports, portConfigs...)\n\t}\n\treturn nil\n}\n\n\/\/ Type returns the type of this option\nfunc (p *PortOpt) Type() string {\n\treturn \"port\"\n}\n\n\/\/ String returns a string repr of this option\nfunc (p *PortOpt) String() string {\n\tports := []string{}\n\tfor _, port := range p.ports {\n\t\trepr := fmt.Sprintf(\"%v:%v\/%s\/%s\", port.PublishedPort, port.TargetPort, port.Protocol, port.PublishMode)\n\t\tports = append(ports, repr)\n\t}\n\treturn strings.Join(ports, \", \")\n}\n\n\/\/ Value returns the ports\nfunc (p *PortOpt) Value() []swarm.PortConfig {\n\treturn p.ports\n}\n\n\/\/ ConvertPortToPortConfig converts ports to the swarm type\nfunc ConvertPortToPortConfig(\n\tport nat.Port,\n\tportBindings map[nat.Port][]nat.PortBinding,\n) ([]swarm.PortConfig, error) {\n\tports := []swarm.PortConfig{}\n\n\tfor _, binding := range portBindings[port] {\n\t\thostPort, err := strconv.ParseUint(binding.HostPort, 10, 16)\n\t\tif err != nil && binding.HostPort != \"\" {\n\t\t\treturn nil, fmt.Errorf(\"invalid hostport binding (%s) for port (%s)\", binding.HostPort, port.Port())\n\t\t}\n\t\tports = append(ports, swarm.PortConfig{\n\t\t\t\/\/TODO Name: ?\n\t\t\tProtocol: swarm.PortConfigProtocol(strings.ToLower(port.Proto())),\n\t\t\tTargetPort: uint32(port.Int()),\n\t\t\tPublishedPort: uint32(hostPort),\n\t\t\tPublishMode: swarm.PortConfigPublishModeIngress,\n\t\t})\n\t}\n\treturn ports, nil\n}\n","avg_line_length":25.2865853659,"max_line_length":112,"alphanum_fraction":0.6698818423} +{"size":5270,"ext":"go","lang":"Go","max_stars_count":null,"content":"package memsql\n\nimport (\n\t\"testing\"\n\n\t\"github.com\/runner-mei\/memsql\/memcore\"\n\t\"github.com\/runner-mei\/memsql\/parser\"\n\t\"github.com\/runner-mei\/memsql\/vm\"\n\t\"github.com\/xwb1989\/sqlparser\"\n)\n\nfunc TestFilter(t *testing.T) {\n\t\/\/ conn, err := sql.Open(\"sqlite3\", \":memory:\")\n\t\/\/ if err != nil {\n\t\/\/ \tt.Fatal(err)\n\t\/\/ \treturn\n\t\/\/ }\n\t\/\/ defer conn.Close()\n\n\t\/\/ storage := memcore.NewStorage()\n\n\t\/\/ table, err := memcore.ToTable([]map[string]interface{}{\n\t\/\/ \tmap[string]interface{}{\n\t\/\/ \t\t\"id\": 1,\n\t\/\/ \t\t\"name\": \"1\",\n\t\/\/ \t},\n\t\/\/ \tmap[string]interface{}{\n\t\/\/ \t\t\"id\": 2,\n\t\/\/ \t\t\"name\": \"3\",\n\t\/\/ \t},\n\t\/\/ })\n\t\/\/ if err != nil {\n\t\/\/ \tt.Error(err)\n\t\/\/ \treturn\n\t\/\/ }\n\t\/\/ storage.Set(\"mo_list\", nil, time.Now(), table, nil)\n\n\t\/\/ ctx := &Context{\n\t\/\/ \tCtx: context.Background(),\n\t\/\/ \tStorage: WrapStorage(storage),\n\t\/\/ \tForeign: NewDbForeign(\"sqlite3\", conn),\n\t\/\/ }\n\t\/\/ fctx := &SessionContext{\n\t\/\/ \tContext: ctx,\n\t\/\/ \talias: map[string]string{},\n\t\/\/ \tresultSets: map[string][]memcore.Record{},\n\t\/\/ }\n\n\t\/\/ query, _, err := storage.From(ctx, 
\"mo_list\", func(ctx memcore.GetValuer) (bool, error){\n\t\/\/ \treturn true, nil\n\t\/\/ })\n\t\/\/ fctx.addQuery(\"mo_list\", \"mo\", query)\n\n\t\/\/ opts := cmp.Options{\n\t\/\/ \tcmpopts.EquateApproxTime(1 * time.Second),\n\t\/\/ }\n\n\tfor _, test := range []struct {\n\t\tfctx parser.FilterContext\n\t\tsql string\n\t\tvalues map[string]map[string]vm.Value\n\t\tresult bool\n\t}{\n\t\t{\n\t\t\tsql: \"select * from cpu where a <> 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(1),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a <> 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a = 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(1),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a = 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a = 2 and b = 3\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t\t\"b\": vm.IntToValue(3),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a = 2 or b = 3\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(3),\n\t\t\t\t\t\"b\": vm.IntToValue(3),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a in (1,2)\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a not in (1,2)\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(4),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a > 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a > 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(4),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a >= 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a >= 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(4),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a < 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: false,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a < 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": 
map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(1),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a <= 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(2),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t\t{\n\t\t\tsql: \"select * from cpu where a <= 2\",\n\t\t\tvalues: map[string]map[string]vm.Value{\n\t\t\t\t\"\": map[string]vm.Value{\n\t\t\t\t\t\"a\": vm.IntToValue(1),\n\t\t\t\t},\n\t\t\t},\n\t\t\tresult: true,\n\t\t},\n\t} {\n\t\tt.Run(test.sql, func(t *testing.T) {\n\t\t\tstmt, err := sqlparser.Parse(test.sql)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(test.sql)\n\t\t\t\tt.Error(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tsel, _ := stmt.(*sqlparser.Select)\n\n\t\t\tpredicate, err := parser.ToFilter(test.fctx, sel.Where.Expr)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif predicate == nil {\n\t\t\t\tt.Error(\"predicate is null\")\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tctx := vm.GetValueFunc(func(tableName, name string) (Value, error) {\n\t\t\t\tvalues := test.values[tableName]\n\t\t\t\tif values == nil {\n\t\t\t\t\treturn vm.Null(), memcore.ColumnNotFound(tableName, name)\n\t\t\t\t}\n\t\t\t\tvalue, ok := values[name]\n\t\t\t\tif !ok {\n\t\t\t\t\treturn vm.Null(), memcore.ColumnNotFound(tableName, name)\n\t\t\t\t}\n\t\t\t\treturn value, nil\n\t\t\t})\n\t\t\tresult, err := predicate(ctx)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif result != test.result {\n\t\t\t\tt.Error(\"want\", test.result, \"got\", result)\n\t\t\t}\n\t\t})\n\t}\n\n}\n","avg_line_length":20.7480314961,"max_line_length":92,"alphanum_fraction":0.5472485769} +{"size":948,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package product\n\nimport (\n \"encoding\/xml\"\n\n \"github.com\/bububa\/opentaobao\/model\"\n)\n\n\/* \n\u83b7\u53d6\u63a8\u5e7f\u94fe\u63a5 APIResponse\naliexpress.social.item.promotion\n\n\u83b7\u53d6\u5546\u54c1\u793e\u4ea4\u63a8\u5e7f\u94fe\u63a5\n*\/\ntype AliexpressSocialItemPromotionAPIResponse struct {\n model.CommonResponse\n AliexpressSocialItemPromotionResponse\n}\n\ntype AliexpressSocialItemPromotionResponse struct {\n XMLName xml.Name `xml:\"aliexpress_social_item_promotion_response\"`\n\tRequestId string `json:\"request_id,omitempty\" xml:\"request_id,omitempty\"` \/\/ \u5e73\u53f0\u9881\u53d1\u7684\u6bcf\u6b21\u8bf7\u6c42\u8bbf\u95ee\u7684\u552f\u4e00\u6807\u8bc6\n\n \/\/ \u63a8\u5e7f\u94fe\u63a5\n \n PromotionUrl string `json:\"promotion_url,omitempty\" xml:\"promotion_url,omitempty\"`\n\n \n \/\/ \u662f\u5426\u6210\u529f\n \n IsSuccess bool `json:\"is_success,omitempty\" xml:\"is_success,omitempty\"`\n\n \n \/\/ \u9519\u8bef\u7801\n \n ResultCode string `json:\"result_code,omitempty\" xml:\"result_code,omitempty\"`\n\n \n \/\/ \u9519\u8bef\u4fe1\u606f\n \n ResultMsg string `json:\"result_msg,omitempty\" xml:\"result_msg,omitempty\"`\n\n \n}\n","avg_line_length":21.0666666667,"max_line_length":114,"alphanum_fraction":0.7056962025} +{"size":405,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright (C) MongoDB, Inc. 2017-present.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n\/\/ not use this file except in compliance with the License. 
You may obtain\n\/\/ a copy of the License at http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\npackage version \/\/ import \"go.mongodb.org\/mongo-driver\/version\"\n\n\/\/ Driver is the current version of the driver.\nvar Driver = \"v1.2.1\"\n","avg_line_length":36.8181818182,"max_line_length":74,"alphanum_fraction":0.7259259259} +{"size":14535,"ext":"go","lang":"Go","max_stars_count":null,"content":"package logfmt\n\nimport (\n\t\"bytes\"\n\t\"encoding\/base64\"\n\t\"encoding\/json\"\n\t\"errors\"\n\t\"math\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\t\"unicode\/utf8\"\n\n\t\"go.uber.org\/zap\/buffer\"\n\t\"go.uber.org\/zap\/zapcore\"\n)\n\nconst (\n\t\/\/ For JSON-escaping; see logfmtEncoder.safeAddString below.\n\t_hex = \"0123456789abcdef\"\n)\n\nvar (\n\t_logfmtPool = sync.Pool{New: func() interface{} {\n\t\treturn &logfmtEncoder{}\n\t}}\n\n\tbufferpool = buffer.NewPool()\n)\n\nvar ErrUnsupportedValueType = errors.New(\"unsupported value type\")\n\nfunc getEncoder() *logfmtEncoder {\n\treturn _logfmtPool.Get().(*logfmtEncoder)\n}\n\nfunc putEncoder(enc *logfmtEncoder) {\n\tenc.EncoderConfig = nil\n\tenc.buf = nil\n\t_logfmtPool.Put(enc)\n}\n\ntype logfmtEncoder struct {\n\t*zapcore.EncoderConfig\n\tbuf *buffer.Buffer\n\tnamespaces []string\n}\n\nfunc NewEncoder(cfg zapcore.EncoderConfig) zapcore.Encoder {\n\treturn &logfmtEncoder{\n\t\tEncoderConfig: &cfg,\n\t\tbuf: bufferpool.Get(),\n\t}\n}\n\nfunc (enc *logfmtEncoder) AddArray(key string, arr zapcore.ArrayMarshaler) error {\n\tenc.addKey(key)\n\treturn enc.AppendArray(arr)\n}\n\nfunc (enc *logfmtEncoder) AddObject(key string, obj zapcore.ObjectMarshaler) error {\n\tenc.addKey(key)\n\treturn enc.AppendObject(obj)\n}\n\nfunc (enc *logfmtEncoder) AddBinary(key string, value []byte) {\n\tenc.AddString(key, base64.StdEncoding.EncodeToString(value))\n}\n\nfunc (enc *logfmtEncoder) AddByteString(key string, value []byte) {\n\tenc.addKey(key)\n\tenc.AppendByteString(value)\n}\n\nfunc (enc *logfmtEncoder) AddBool(key string, value bool) {\n\tenc.addKey(key)\n\tenc.AppendBool(value)\n}\n\nfunc (enc *logfmtEncoder) AddComplex128(key string, value complex128) {\n\tenc.addKey(key)\n\tenc.AppendComplex128(value)\n}\n\nfunc (enc *logfmtEncoder) AddDuration(key string, value time.Duration) {\n\tenc.addKey(key)\n\tenc.AppendDuration(value)\n}\n\nfunc (enc *logfmtEncoder) AddFloat64(key string, value float64) {\n\tenc.addKey(key)\n\tenc.AppendFloat64(value)\n}\n\nfunc (enc *logfmtEncoder) AddInt64(key string, value int64) {\n\tenc.addKey(key)\n\tenc.AppendInt64(value)\n}\n\nfunc (enc *logfmtEncoder) AddReflected(key string, value interface{}) error {\n\tmarshaled, err := json.Marshal(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\tenc.addKey(key)\n\t_, err = enc.buf.Write(marshaled)\n\treturn err\n}\n\nfunc (enc *logfmtEncoder) OpenNamespace(key string) {\n\tenc.namespaces = append(enc.namespaces, key)\n}\n\nfunc (enc *logfmtEncoder) AddString(key, value string) {\n\tenc.addKey(key)\n\tenc.AppendString(value)\n}\n\nfunc (enc *logfmtEncoder) AddTime(key string, value time.Time) {\n\tenc.addKey(key)\n\tenc.AppendTime(value)\n}\n\nfunc (enc *logfmtEncoder) AddUint64(key string, value uint64) {\n\tenc.addKey(key)\n\tenc.AppendUint64(value)\n}\n\nfunc (enc *logfmtEncoder) AppendArray(arr zapcore.ArrayMarshaler) error {\n\tmarshaler := literalEncoder{\n\t\tEncoderConfig: enc.EncoderConfig,\n\t\tbuf: bufferpool.Get(),\n\t}\n\n\terr := arr.MarshalLogArray(&marshaler)\n\tif err == nil {\n\t\tenc.AppendByteString(marshaler.buf.Bytes())\n\t} else 
{\n\t\tenc.AppendByteString(nil)\n\t}\n\tmarshaler.buf.Free()\n\treturn err\n}\n\nfunc (enc *logfmtEncoder) AppendObject(obj zapcore.ObjectMarshaler) error {\n\tmarshaler := enc.clone()\n\tmarshaler.namespaces = nil\n\n\terr := obj.MarshalLogObject(marshaler)\n\tif err == nil {\n\t\tenc.AppendByteString(marshaler.buf.Bytes())\n\t} else {\n\t\tenc.AppendByteString(nil)\n\t}\n\tmarshaler.buf.Free()\n\tputEncoder(marshaler)\n\treturn err\n}\n\nfunc (enc *logfmtEncoder) AppendBool(value bool) {\n\tif value {\n\t\tenc.AppendString(\"true\")\n\t} else {\n\t\tenc.AppendString(\"false\")\n\t}\n}\n\nfunc (enc *logfmtEncoder) AppendByteString(value []byte) {\n\tneedsQuotes := bytes.IndexFunc(value, needsQuotedValueRune) != -1\n\tif needsQuotes {\n\t\tenc.buf.AppendByte('\"')\n\t}\n\tenc.safeAddByteString(value)\n\tif needsQuotes {\n\t\tenc.buf.AppendByte('\"')\n\t}\n}\n\nfunc (enc *logfmtEncoder) AppendComplex128(value complex128) {\n\t\/\/ Cast to a platform-independent, fixed-size type.\n\tr, i := float64(real(value)), float64(imag(value))\n\tenc.buf.AppendFloat(r, 64)\n\tenc.buf.AppendByte('+')\n\tenc.buf.AppendFloat(i, 64)\n\tenc.buf.AppendByte('i')\n}\n\nfunc (enc *logfmtEncoder) AppendDuration(value time.Duration) {\n\tcur := enc.buf.Len()\n\tenc.EncodeDuration(value, enc)\n\tif cur == enc.buf.Len() {\n\t\t\/\/ User-supplied EncodeDuration is a no-op. Fall back to nanoseconds.\n\t\tenc.AppendInt64(int64(value))\n\t}\n}\n\nfunc (enc *logfmtEncoder) AppendInt64(value int64) {\n\tenc.buf.AppendInt(value)\n}\n\nfunc (enc *logfmtEncoder) AppendReflected(value interface{}) error {\n\tmarshaled, err := json.Marshal(value)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = enc.buf.Write(marshaled)\n\treturn err\n}\n\nfunc (enc *logfmtEncoder) AppendString(value string) {\n\tneedsQuotes := strings.IndexFunc(value, needsQuotedValueRune) != -1\n\tif needsQuotes {\n\t\tenc.buf.AppendByte('\"')\n\t}\n\tenc.safeAddString(value)\n\tif needsQuotes {\n\t\tenc.buf.AppendByte('\"')\n\t}\n}\n\nfunc (enc *logfmtEncoder) AppendTime(value time.Time) {\n\tcur := enc.buf.Len()\n\tenc.EncodeTime(value, enc)\n\tif cur == enc.buf.Len() {\n\t\tenc.AppendInt64(value.UnixNano())\n\t}\n}\n\nfunc (enc *logfmtEncoder) AppendUint64(value uint64) {\n\tenc.buf.AppendUint(value)\n}\n\nfunc (enc *logfmtEncoder) AddComplex64(k string, v complex64) { enc.AddComplex128(k, complex128(v)) }\nfunc (enc *logfmtEncoder) AddFloat32(k string, v float32) { enc.AddFloat64(k, float64(v)) }\nfunc (enc *logfmtEncoder) AddInt(k string, v int) { enc.AddInt64(k, int64(v)) }\nfunc (enc *logfmtEncoder) AddInt32(k string, v int32) { enc.AddInt64(k, int64(v)) }\nfunc (enc *logfmtEncoder) AddInt16(k string, v int16) { enc.AddInt64(k, int64(v)) }\nfunc (enc *logfmtEncoder) AddInt8(k string, v int8) { enc.AddInt64(k, int64(v)) }\nfunc (enc *logfmtEncoder) AddUint(k string, v uint) { enc.AddUint64(k, uint64(v)) }\nfunc (enc *logfmtEncoder) AddUint32(k string, v uint32) { enc.AddUint64(k, uint64(v)) }\nfunc (enc *logfmtEncoder) AddUint16(k string, v uint16) { enc.AddUint64(k, uint64(v)) }\nfunc (enc *logfmtEncoder) AddUint8(k string, v uint8) { enc.AddUint64(k, uint64(v)) }\nfunc (enc *logfmtEncoder) AddUintptr(k string, v uintptr) { enc.AddUint64(k, uint64(v)) }\nfunc (enc *logfmtEncoder) AppendComplex64(v complex64) { enc.AppendComplex128(complex128(v)) }\nfunc (enc *logfmtEncoder) AppendFloat64(v float64) { enc.appendFloat(v, 64) }\nfunc (enc *logfmtEncoder) AppendFloat32(v float32) { enc.appendFloat(float64(v), 32) }\nfunc (enc *logfmtEncoder) 
AppendInt(v int) { enc.AppendInt64(int64(v)) }\nfunc (enc *logfmtEncoder) AppendInt32(v int32) { enc.AppendInt64(int64(v)) }\nfunc (enc *logfmtEncoder) AppendInt16(v int16) { enc.AppendInt64(int64(v)) }\nfunc (enc *logfmtEncoder) AppendInt8(v int8) { enc.AppendInt64(int64(v)) }\nfunc (enc *logfmtEncoder) AppendUint(v uint) { enc.AppendUint64(uint64(v)) }\nfunc (enc *logfmtEncoder) AppendUint32(v uint32) { enc.AppendUint64(uint64(v)) }\nfunc (enc *logfmtEncoder) AppendUint16(v uint16) { enc.AppendUint64(uint64(v)) }\nfunc (enc *logfmtEncoder) AppendUint8(v uint8) { enc.AppendUint64(uint64(v)) }\nfunc (enc *logfmtEncoder) AppendUintptr(v uintptr) { enc.AppendUint64(uint64(v)) }\n\nfunc (enc *logfmtEncoder) Clone() zapcore.Encoder {\n\tclone := enc.clone()\n\tclone.buf.Write(enc.buf.Bytes())\n\treturn clone\n}\n\nfunc (enc *logfmtEncoder) clone() *logfmtEncoder {\n\tclone := getEncoder()\n\tclone.EncoderConfig = enc.EncoderConfig\n\tclone.buf = bufferpool.Get()\n\tclone.namespaces = enc.namespaces\n\treturn clone\n}\n\nfunc (enc *logfmtEncoder) EncodeEntry(ent zapcore.Entry, fields []zapcore.Field) (*buffer.Buffer, error) {\n\tfinal := enc.clone()\n\tif final.TimeKey != \"\" {\n\t\tfinal.AddTime(final.TimeKey, ent.Time)\n\t}\n\tif final.LevelKey != \"\" {\n\t\tfinal.addKey(final.LevelKey)\n\t\tcur := final.buf.Len()\n\t\tfinal.EncodeLevel(ent.Level, final)\n\t\tif cur == final.buf.Len() {\n\t\t\t\/\/ User-supplied EncodeLevel was a no-op. Fall back to strings to keep\n\t\t\t\/\/ output valid.\n\t\t\tfinal.AppendString(ent.Level.String())\n\t\t}\n\t}\n\tif ent.Caller.Defined && final.CallerKey != \"\" {\n\t\tfinal.addKey(final.CallerKey)\n\t\tcur := final.buf.Len()\n\t\tfinal.EncodeCaller(ent.Caller, final)\n\t\tif cur == final.buf.Len() {\n\t\t\t\/\/ User-supplied EncodeCaller was a no-op. 
Fall back to strings to\n\t\t\t\/\/ keep output valid.\n\t\t\tfinal.AppendString(ent.Caller.String())\n\t\t}\n\t}\n\tif final.MessageKey != \"\" {\n\t\tfinal.addKey(enc.MessageKey)\n\t\tfinal.AppendString(ent.Message)\n\t}\n\tif enc.buf.Len() > 0 {\n\t\tfinal.buf.AppendByte(' ')\n\t\tfinal.buf.Write(enc.buf.Bytes())\n\t}\n\taddFields(final, fields)\n\tif ent.Stack != \"\" && final.StacktraceKey != \"\" {\n\t\tfinal.AddString(final.StacktraceKey, ent.Stack)\n\t}\n\tif final.LineEnding != \"\" {\n\t\tfinal.buf.AppendString(final.LineEnding)\n\t} else {\n\t\tfinal.buf.AppendString(zapcore.DefaultLineEnding)\n\t}\n\n\tret := final.buf\n\tputEncoder(final)\n\treturn ret, nil\n}\n\nfunc (enc *logfmtEncoder) truncate() {\n\tenc.buf.Reset()\n\tenc.namespaces = nil\n}\n\nfunc (enc *logfmtEncoder) addKey(key string) {\n\tif enc.buf.Len() > 0 {\n\t\tenc.buf.AppendByte(' ')\n\t}\n\tfor _, ns := range enc.namespaces {\n\t\tenc.safeAddString(ns)\n\t\tenc.buf.AppendByte('.')\n\t}\n\tenc.safeAddString(key)\n\tenc.buf.AppendByte('=')\n}\n\nfunc (enc *logfmtEncoder) appendFloat(val float64, bitSize int) {\n\tswitch {\n\tcase math.IsNaN(val):\n\t\tenc.buf.AppendString(`NaN`)\n\tcase math.IsInf(val, 1):\n\t\tenc.buf.AppendString(`+Inf`)\n\tcase math.IsInf(val, -1):\n\t\tenc.buf.AppendString(`-Inf`)\n\tdefault:\n\t\tenc.buf.AppendFloat(val, bitSize)\n\t}\n}\n\n\/\/ safeAddString JSON-escapes a string and appends it to the internal buffer.\n\/\/ Unlike the standard library's encoder, it doesn't attempt to protect the\n\/\/ user from browser vulnerabilities or JSONP-related problems.\nfunc (enc *logfmtEncoder) safeAddString(s string) {\n\tfor i := 0; i < len(s); {\n\t\tif enc.tryAddRuneSelf(s[i]) {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tr, size := utf8.DecodeRuneInString(s[i:])\n\t\tif enc.tryAddRuneError(r, size) {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tenc.buf.AppendString(s[i : i+size])\n\t\ti += size\n\t}\n}\n\n\/\/ safeAddByteString is no-alloc equivalent of safeAddString(string(s)) for s []byte.\nfunc (enc *logfmtEncoder) safeAddByteString(s []byte) {\n\tfor i := 0; i < len(s); {\n\t\tif enc.tryAddRuneSelf(s[i]) {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tr, size := utf8.DecodeRune(s[i:])\n\t\tif enc.tryAddRuneError(r, size) {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tenc.buf.Write(s[i : i+size])\n\t\ti += size\n\t}\n}\n\n\/\/ tryAddRuneSelf appends b if it is valid UTF-8 character represented in a single byte.\nfunc (enc *logfmtEncoder) tryAddRuneSelf(b byte) bool {\n\tif b >= utf8.RuneSelf {\n\t\treturn false\n\t}\n\tif 0x20 <= b && b != '\\\\' && b != '\"' {\n\t\tenc.buf.AppendByte(b)\n\t\treturn true\n\t}\n\tswitch b {\n\tcase '\\\\', '\"':\n\t\tenc.buf.AppendByte('\\\\')\n\t\tenc.buf.AppendByte(b)\n\tcase '\\n':\n\t\tenc.buf.AppendByte('\\\\')\n\t\tenc.buf.AppendByte('n')\n\tcase '\\r':\n\t\tenc.buf.AppendByte('\\\\')\n\t\tenc.buf.AppendByte('r')\n\tcase '\\t':\n\t\tenc.buf.AppendByte('\\\\')\n\t\tenc.buf.AppendByte('t')\n\tdefault:\n\t\t\/\/ Encode bytes < 0x20, except for the escape sequences above.\n\t\tenc.buf.AppendString(`\\u00`)\n\t\tenc.buf.AppendByte(_hex[b>>4])\n\t\tenc.buf.AppendByte(_hex[b&0xF])\n\t}\n\treturn true\n}\n\nfunc (enc *logfmtEncoder) tryAddRuneError(r rune, size int) bool {\n\tif r == utf8.RuneError && size == 1 {\n\t\tenc.buf.AppendString(`\\ufffd`)\n\t\treturn true\n\t}\n\treturn false\n}\n\ntype literalEncoder struct {\n\t*zapcore.EncoderConfig\n\tbuf *buffer.Buffer\n}\n\nfunc (enc *literalEncoder) AppendBool(value bool) {\n\tenc.addSeparator()\n\tif value 
{\n\t\tenc.AppendString(\"true\")\n\t} else {\n\t\tenc.AppendString(\"false\")\n\t}\n}\n\nfunc (enc *literalEncoder) AppendByteString(value []byte) {\n\tenc.addSeparator()\n\tenc.buf.AppendString(string(value))\n}\n\nfunc (enc *literalEncoder) AppendComplex128(value complex128) {\n\tenc.addSeparator()\n\t\/\/ Cast to a platform-independent, fixed-size type.\n\tr, i := float64(real(value)), float64(imag(value))\n\tenc.buf.AppendFloat(r, 64)\n\tenc.buf.AppendByte('+')\n\tenc.buf.AppendFloat(i, 64)\n\tenc.buf.AppendByte('i')\n}\n\nfunc (enc *literalEncoder) AppendComplex64(value complex64) {\n\tenc.AppendComplex128(complex128(value))\n}\n\nfunc (enc *literalEncoder) AppendFloat64(value float64) {\n\tenc.addSeparator()\n\tenc.buf.AppendFloat(value, 64)\n}\n\nfunc (enc *literalEncoder) AppendFloat32(value float32) {\n\tenc.addSeparator()\n\tenc.buf.AppendFloat(float64(value), 32)\n}\n\nfunc (enc *literalEncoder) AppendInt64(value int64) {\n\tenc.addSeparator()\n\tenc.buf.AppendInt(value)\n}\n\nfunc (enc *literalEncoder) AppendInt(v int) { enc.AppendInt64(int64(v)) }\nfunc (enc *literalEncoder) AppendInt32(v int32) { enc.AppendInt64(int64(v)) }\nfunc (enc *literalEncoder) AppendInt16(v int16) { enc.AppendInt64(int64(v)) }\nfunc (enc *literalEncoder) AppendInt8(v int8) { enc.AppendInt64(int64(v)) }\n\nfunc (enc *literalEncoder) AppendString(value string) {\n\tenc.addSeparator()\n\tenc.buf.AppendString(value)\n}\n\nfunc (enc *literalEncoder) AppendUint64(value uint64) {\n\tenc.addSeparator()\n\tenc.buf.AppendUint(value)\n}\n\nfunc (enc *literalEncoder) AppendUint(v uint) { enc.AppendUint64(uint64(v)) }\nfunc (enc *literalEncoder) AppendUint32(v uint32) { enc.AppendUint64(uint64(v)) }\nfunc (enc *literalEncoder) AppendUint16(v uint16) { enc.AppendUint64(uint64(v)) }\nfunc (enc *literalEncoder) AppendUint8(v uint8) { enc.AppendUint64(uint64(v)) }\nfunc (enc *literalEncoder) AppendUintptr(v uintptr) { enc.AppendUint64(uint64(v)) }\n\nfunc (enc *literalEncoder) AppendDuration(value time.Duration) {\n\tcur := enc.buf.Len()\n\tenc.EncodeDuration(value, enc)\n\tif cur == enc.buf.Len() {\n\t\t\/\/ User-supplied EncodeDuration is a no-op. Fall back to nanoseconds.\n\t\tenc.AppendInt64(int64(value))\n\t}\n}\n\nfunc (enc *literalEncoder) AppendTime(value time.Time) {\n\tcur := enc.buf.Len()\n\tenc.EncodeTime(value, enc)\n\tif cur == enc.buf.Len() {\n\t\tenc.AppendInt64(value.UnixNano())\n\t}\n}\n\nfunc (enc *literalEncoder) AppendArray(arr zapcore.ArrayMarshaler) error {\n\treturn arr.MarshalLogArray(enc)\n}\n\nfunc (enc *literalEncoder) AppendObject(zapcore.ObjectMarshaler) error {\n\treturn ErrUnsupportedValueType\n}\n\nfunc (enc *literalEncoder) AppendReflected(value interface{}) error {\n\treturn ErrUnsupportedValueType\n}\n\nfunc (enc *literalEncoder) addSeparator() {\n\tif enc.buf.Len() > 0 {\n\t\tenc.buf.AppendByte(',')\n\t}\n}\n\nfunc needsQuotedValueRune(r rune) bool {\n\treturn r <= ' ' || r == '=' || r == '\"' || r == utf8.RuneError\n}\n\nfunc addFields(enc zapcore.ObjectEncoder, fields []zapcore.Field) {\n\tfor i := range fields {\n\t\tfields[i].AddTo(enc)\n\t}\n}\n\nfunc init() {\n\t\/\/ zap.RegisterEncoder(\"logfmt\", func(cfg zapcore.EncoderConfig) (zapcore.Encoder, error) {\n\t\/\/ \tenc := NewEncoder(cfg)\n\t\/\/ \treturn enc, nil\n\t\/\/ })\n}\n","avg_line_length":26.71875,"max_line_length":106,"alphanum_fraction":0.6934296526} +{"size":2848,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ Copyright Elasticsearch B.V. and\/or licensed to Elasticsearch B.V. 
under one\n\/\/ or more contributor license agreements. Licensed under the Elastic License;\n\/\/ you may not use this file except in compliance with the Elastic License.\n\npackage api\n\nimport (\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com\/joeshaw\/multierror\"\n\n\t\"github.com\/codragonzuo\/beats\/libbeat\/logp\"\n)\n\nconst debugK = \"event_reporter\"\n\n\/\/ EventReporter is an object that will periodically send asyncronously events to the\n\/\/ CM events endpoints.\ntype EventReporter struct {\n\tlogger *logp.Logger\n\tclient AuthClienter\n\tperiod time.Duration\n\tmaxBatchSize int\n\tdone chan struct{}\n\tbuffer []Event\n\tmu sync.Mutex\n\twg sync.WaitGroup\n}\n\n\/\/ NewEventReporter returns a new event reporter\nfunc NewEventReporter(\n\tlogger *logp.Logger,\n\tclient AuthClienter,\n\tperiod time.Duration,\n\tmaxBatchSize int,\n) *EventReporter {\n\tlog := logger.Named(debugK)\n\treturn &EventReporter{\n\t\tlogger: log,\n\t\tclient: client,\n\t\tperiod: period,\n\t\tmaxBatchSize: maxBatchSize,\n\t\tdone: make(chan struct{}),\n\t}\n}\n\n\/\/ Start starts the event reported and wait for new events.\nfunc (e *EventReporter) Start() {\n\te.wg.Add(1)\n\tgo e.worker()\n\te.logger.Info(\"Starting event reporter service\")\n}\n\n\/\/ Stop stops the reporting events to the endpoint.\nfunc (e *EventReporter) Stop() {\n\te.logger.Info(\"Stopping event reporter service\")\n\tclose(e.done)\n\te.wg.Wait()\n}\n\nfunc (e *EventReporter) worker() {\n\tdefer e.wg.Done()\n\tticker := time.NewTicker(e.period)\n\tdefer ticker.Stop()\n\n\tvar done bool\n\tfor !done {\n\t\tselect {\n\t\tcase <-e.done:\n\t\t\tdone = true\n\t\tcase <-ticker.C:\n\t\t}\n\n\t\tvar buf []Event\n\t\te.mu.Lock()\n\t\tbuf, e.buffer = e.buffer, nil\n\t\te.mu.Unlock()\n\n\t\te.reportEvents(buf)\n\t}\n}\n\nfunc (e *EventReporter) reportEvents(events []Event) {\n\tif len(events) == 0 {\n\t\treturn\n\t}\n\te.logger.Debugf(\"Reporting %d events to Kibana\", len(events))\n\tif err := e.sendBatchEvents(events); err != nil {\n\t\te.logger.Errorf(\"could not send events, error: %+v\", err)\n\t}\n}\n\nfunc (e *EventReporter) sendBatchEvents(events []Event) error {\n\tvar errors multierror.Errors\n\tfor pos := 0; pos < len(events); pos += e.maxBatchSize {\n\t\tj := pos + e.maxBatchSize\n\t\tif j > len(events) {\n\t\t\tj = len(events)\n\t\t}\n\t\tif err := e.sendEvents(events[pos:j]); err != nil {\n\t\t\terrors = append(errors, err)\n\t\t}\n\t}\n\treturn errors.Err()\n}\n\nfunc (e *EventReporter) sendEvents(events []Event) error {\n\trequests := make([]EventRequest, len(events))\n\tfor i, event := range events {\n\t\trequests[i] = EventRequest{\n\t\t\tTimestamp: time.Now(),\n\t\t\tEventType: event.EventType(),\n\t\t\tEvent: event,\n\t\t}\n\t}\n\treturn e.client.SendEvents(requests)\n}\n\n\/\/ AddEvent adds an event to be send on the next tick.\nfunc (e *EventReporter) AddEvent(event Event) {\n\te.mu.Lock()\n\tdefer e.mu.Unlock()\n\te.buffer = append(e.buffer, event)\n}\n","avg_line_length":22.6031746032,"max_line_length":85,"alphanum_fraction":0.6766151685} +{"size":2359,"ext":"go","lang":"Go","max_stars_count":null,"content":"package h\n\nimport (\n\t. 
\"github.com\/colin3dmax\/chroma\" \/\/ nolint\n\t\"github.com\/colin3dmax\/chroma\/lexers\/internal\"\n)\n\n\/\/ Hexdump lexer.\nvar Hexdump = internal.Register(MustNewLexer(\n\t&Config{\n\t\tName: \"Hexdump\",\n\t\tAliases: []string{\"hexdump\"},\n\t\tFilenames: []string{},\n\t\tMimeTypes: []string{},\n\t},\n\tRules{\n\t\t\"root\": {\n\t\t\t{`\\n`, Text, nil},\n\t\t\tInclude(\"offset\"),\n\t\t\t{`([0-9A-Ha-h]{2})(\\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil},\n\t\t\t{`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil},\n\t\t\t{`(\\s{2,3})(\\>)(.{16})(\\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push(\"bracket-strings\")},\n\t\t\t{`(\\s{2,3})(\\|)(.{16})(\\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push(\"piped-strings\")},\n\t\t\t{`(\\s{2,3})(\\>)(.{1,15})(\\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil},\n\t\t\t{`(\\s{2,3})(\\|)(.{1,15})(\\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil},\n\t\t\t{`(\\s{2,3})(.{1,15})$`, ByGroups(Text, LiteralString), nil},\n\t\t\t{`(\\s{2,3})(.{16}|.{20})$`, ByGroups(Text, LiteralString), Push(\"nonpiped-strings\")},\n\t\t\t{`\\s`, Text, nil},\n\t\t\t{`^\\*`, Punctuation, nil},\n\t\t},\n\t\t\"offset\": {\n\t\t\t{`^([0-9A-Ha-h]+)(:)`, ByGroups(NameLabel, Punctuation), Push(\"offset-mode\")},\n\t\t\t{`^[0-9A-Ha-h]+`, NameLabel, nil},\n\t\t},\n\t\t\"offset-mode\": {\n\t\t\t{`\\s`, Text, Pop(1)},\n\t\t\t{`[0-9A-Ha-h]+`, NameLabel, nil},\n\t\t\t{`:`, Punctuation, nil},\n\t\t},\n\t\t\"piped-strings\": {\n\t\t\t{`\\n`, Text, nil},\n\t\t\tInclude(\"offset\"),\n\t\t\t{`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil},\n\t\t\t{`(\\s{2,3})(\\|)(.{1,16})(\\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil},\n\t\t\t{`\\s`, Text, nil},\n\t\t\t{`^\\*`, Punctuation, nil},\n\t\t},\n\t\t\"bracket-strings\": {\n\t\t\t{`\\n`, Text, nil},\n\t\t\tInclude(\"offset\"),\n\t\t\t{`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil},\n\t\t\t{`(\\s{2,3})(\\>)(.{1,16})(\\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil},\n\t\t\t{`\\s`, Text, nil},\n\t\t\t{`^\\*`, Punctuation, nil},\n\t\t},\n\t\t\"nonpiped-strings\": {\n\t\t\t{`\\n`, Text, nil},\n\t\t\tInclude(\"offset\"),\n\t\t\t{`([0-9A-Ha-h]{2})(\\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil},\n\t\t\t{`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil},\n\t\t\t{`(\\s{19,})(.{1,20}?)$`, ByGroups(Text, LiteralString), nil},\n\t\t\t{`(\\s{2,3})(.{1,20})$`, ByGroups(Text, LiteralString), nil},\n\t\t\t{`\\s`, Text, nil},\n\t\t\t{`^\\*`, Punctuation, nil},\n\t\t},\n\t},\n))\n","avg_line_length":34.6911764706,"max_line_length":115,"alphanum_fraction":0.5515048749} +{"size":2658,"ext":"go","lang":"Go","max_stars_count":8.0,"content":"\/\/\n\/\/ Last.Backend LLC CONFIDENTIAL\n\/\/ __________________\n\/\/\n\/\/ [2014] - [2019] Last.Backend LLC\n\/\/ All Rights Reserved.\n\/\/\n\/\/ NOTICE: All information contained herein is, and remains\n\/\/ the property of Last.Backend LLC and its suppliers,\n\/\/ if any. 
The intellectual and technical concepts contained\n\/\/ herein are proprietary to Last.Backend LLC\n\/\/ and its suppliers and may be covered by Russian Federation and Foreign Patents,\n\/\/ patents in process, and are protected by trade secret or copyright law.\n\/\/ Dissemination of this information or reproduction of this material\n\/\/ is strictly forbidden unless prior written permission is obtained\n\/\/ from Last.Backend LLC.\n\/\/\n\npackage api\n\nimport (\n\t\"github.com\/lastbackend\/registry\/pkg\/monitor\"\n\t\"github.com\/lastbackend\/registry\/pkg\/util\/blob\/config\"\n\t\"os\"\n\t\"os\/signal\"\n\t\"syscall\"\n\n\t\"github.com\/lastbackend\/lastbackend\/pkg\/log\"\n\t\"github.com\/lastbackend\/registry\/pkg\/api\/envs\"\n\t\"github.com\/lastbackend\/registry\/pkg\/api\/http\"\n\t\"github.com\/lastbackend\/registry\/pkg\/storage\"\n\t\"github.com\/lastbackend\/registry\/pkg\/util\/blob\"\n\t\"github.com\/lastbackend\/registry\/pkg\/util\/blob\/s3\"\n\t\"github.com\/spf13\/viper\"\n)\n\nfunc Daemon() bool {\n\n\tvar (\n\t\tsigs = make(chan os.Signal)\n\t\tdone = make(chan bool, 1)\n\t)\n\n\tlog.New(viper.GetInt(\"verbose\"))\n\tlog.Info(\"Start API server\")\n\n\tstg, err := storage.Get(viper.GetString(\"psql\"))\n\tif err != nil {\n\t\tlog.Fatalf(\"Cannot initialize storage: %v\", err)\n\t}\n\n\tenvs.Get().SetStorage(stg)\n\n\tif viper.IsSet(\"api.blob_storage\") {\n\t\tvar blobStorage blob.IBlobStorage\n\t\tvar cfg config.Config\n\t\tif err := viper.UnmarshalKey(\"api.blob_storage\", &cfg); err != nil {\n\t\t\tlog.Fatalf(\"parse blob_storage config err: %v\", err)\n\t\t}\n\n\t\tswitch viper.GetString(\"api.blob_storage.type\") {\n\t\tcase blob.DriverS3:\n\t\t\tblobStorage = s3.New(cfg)\n\t\tdefault:\n\t\t\tlog.Fatalf(\"log driver not found\")\n\t\t}\n\t\tenvs.Get().SetBlobStorage(blobStorage)\n\t}\n\n\tenvs.Get().SetMonitor(monitor.New(stg))\n\n\tgo func() {\n\t\topts := new(http.HttpOpts)\n\t\tif viper.IsSet(\"api.tls\") {\n\t\t\topts.Insecure = viper.GetBool(\"api.tls.insecure\")\n\t\t\topts.CertFile = viper.GetString(\"api.tls.cert\")\n\t\t\topts.KeyFile = viper.GetString(\"api.tls.key\")\n\t\t\topts.CaFile = viper.GetString(\"api.tls.ca\")\n\t\t}\n\n\t\tif err := http.Listen(viper.GetString(\"api.host\"), viper.GetInt(\"api.port\"), opts); err != nil {\n\t\t\tlog.Fatalf(\"Http server start error: %v\", err)\n\t\t}\n\t}()\n\n\t\/\/ Handle SIGINT and SIGTERM.\n\tsignal.Notify(sigs, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-sigs:\n\t\t\t\tenvs.Get().GetMonitor().Stop()\n\t\t\t\tdone <- true\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\t<-done\n\n\tlog.Info(\"Handle SIGINT and SIGTERM.\")\n\n\treturn true\n}\n","avg_line_length":25.0754716981,"max_line_length":98,"alphanum_fraction":0.7009029345} +{"size":3106,"ext":"go","lang":"Go","max_stars_count":89.0,"content":"\/\/ Copyright (c) 2016, 2018, 2020, Oracle and\/or its affiliates. All rights reserved.\n\/\/ This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https:\/\/oss.oracle.com\/licenses\/upl or Apache License 2.0 as shown at http:\/\/www.apache.org\/licenses\/LICENSE-2.0. You may choose either license.\n\/\/ Code generated. 
DO NOT EDIT.\n\npackage core\n\nimport (\n\t\"github.com\/oracle\/oci-go-sdk\/v31\/common\"\n\t\"net\/http\"\n)\n\n\/\/ UpdateVolumeGroupBackupRequest wrapper for the UpdateVolumeGroupBackup operation\n\/\/\n\/\/ See also\n\/\/\n\/\/ Click https:\/\/docs.cloud.oracle.com\/en-us\/iaas\/tools\/go-sdk-examples\/latest\/core\/UpdateVolumeGroupBackup.go.html to see an example of how to use UpdateVolumeGroupBackupRequest.\ntype UpdateVolumeGroupBackupRequest struct {\n\n\t\/\/ The Oracle Cloud ID (OCID) that uniquely identifies the volume group backup.\n\tVolumeGroupBackupId *string `mandatory:\"true\" contributesTo:\"path\" name:\"volumeGroupBackupId\"`\n\n\t\/\/ Update volume group backup fields\n\tUpdateVolumeGroupBackupDetails `contributesTo:\"body\"`\n\n\t\/\/ For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match`\n\t\/\/ parameter to the value of the etag from a previous GET or POST response for that resource. The resource\n\t\/\/ will be updated or deleted only if the etag you provide matches the resource's current etag value.\n\tIfMatch *string `mandatory:\"false\" contributesTo:\"header\" name:\"if-match\"`\n\n\t\/\/ Unique Oracle-assigned identifier for the request.\n\t\/\/ If you need to contact Oracle about a particular request, please provide the request ID.\n\tOpcRequestId *string `mandatory:\"false\" contributesTo:\"header\" name:\"opc-request-id\"`\n\n\t\/\/ Metadata about the request. This information will not be transmitted to the service, but\n\t\/\/ represents information that the SDK will consume to drive retry behavior.\n\tRequestMetadata common.RequestMetadata\n}\n\nfunc (request UpdateVolumeGroupBackupRequest) String() string {\n\treturn common.PointerString(request)\n}\n\n\/\/ HTTPRequest implements the OCIRequest interface\nfunc (request UpdateVolumeGroupBackupRequest) HTTPRequest(method, path string) (http.Request, error) {\n\treturn common.MakeDefaultHTTPRequestWithTaggedStruct(method, path, request)\n}\n\n\/\/ RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.\nfunc (request UpdateVolumeGroupBackupRequest) RetryPolicy() *common.RetryPolicy {\n\treturn request.RequestMetadata.RetryPolicy\n}\n\n\/\/ UpdateVolumeGroupBackupResponse wrapper for the UpdateVolumeGroupBackup operation\ntype UpdateVolumeGroupBackupResponse struct {\n\n\t\/\/ The underlying http response\n\tRawResponse *http.Response\n\n\t\/\/ The VolumeGroupBackup instance\n\tVolumeGroupBackup `presentIn:\"body\"`\n\n\t\/\/ For optimistic concurrency control. 
See `if-match`.\n\tEtag *string `presentIn:\"header\" name:\"etag\"`\n}\n\nfunc (response UpdateVolumeGroupBackupResponse) String() string {\n\treturn common.PointerString(response)\n}\n\n\/\/ HTTPResponse implements the OCIResponse interface\nfunc (response UpdateVolumeGroupBackupResponse) HTTPResponse() *http.Response {\n\treturn response.RawResponse\n}\n","avg_line_length":41.972972973,"max_line_length":246,"alphanum_fraction":0.793303284} +{"size":917,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package rpc\n\nimport (\n\t\"github.com\/mcmaur\/mc-ms-authentication\/server\/models\"\n)\n\n\/\/ GetUserByIDRpc : select user by id and return detailed infos\nfunc (server *Server) GetUserByIDRpc(userid uint32, user *models.User) error {\n\tnewuser := models.User{}\n\tuser, err := newuser.FindUserByID(server.DB, userid)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\n\/\/ GetUserByEmailRpc : select user by email addresses and return detailed infos\nfunc (server *Server) GetUserByEmailRpc(userEmail string, user *models.User) error {\n\tnewuser := models.User{}\n\tuser, err := newuser.FindUserByEMail(server.DB, userEmail)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\n\/\/ DeleteUserByIDRpc : delete user filtering by user id\nfunc (server *Server) DeleteUserByIDRpc(userid uint32, rows int) error {\n\tnewuser := models.User{}\n\tuser, err := newuser.DeleteUserByID(server.DB, uint32(userid))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n","avg_line_length":25.4722222222,"max_line_length":84,"alphanum_fraction":0.7284623773} +{"size":352,"ext":"go","lang":"Go","max_stars_count":null,"content":"package eventing\n\ntype Header struct {\n\tCommand string `json:\"command\"`\n\tSubcommand string `json:\"subcommand\"`\n\tMetadata string `json:\"metadata\"`\n}\n\ntype Payload struct {\n\tMessage string `json:\"message\"`\n}\n\ntype Message struct {\n\tHeader Header\n\tPayload Payload\n\tResChan chan *Response\n}\n\ntype Response struct {\n\tresponse string\n\terr error\n}\n","avg_line_length":15.3043478261,"max_line_length":38,"alphanum_fraction":0.7215909091} +{"size":25081,"ext":"go","lang":"Go","max_stars_count":null,"content":"package validation\n\nimport (\n\t\"fmt\"\n\t\"math\"\n\t\"reflect\"\n\t\"strconv\"\n\t\"strings\"\n\t\"text\/scanner\"\n\n\t\"github.com\/emwalker\/graphql-go\/errors\"\n\t\"github.com\/emwalker\/graphql-go\/common\"\n\t\"github.com\/emwalker\/graphql-go\/query\"\n\t\"github.com\/emwalker\/graphql-go\/schema\"\n)\n\ntype varSet map[*common.InputValue]struct{}\n\ntype selectionPair struct{ a, b query.Selection }\n\ntype fieldInfo struct {\n\tsf *schema.Field\n\tparent schema.NamedType\n}\n\ntype context struct {\n\tschema *schema.Schema\n\tdoc *query.Document\n\terrs []*errors.QueryError\n\topErrs map[*query.Operation][]*errors.QueryError\n\tusedVars map[*query.Operation]varSet\n\tfieldMap map[*query.Field]fieldInfo\n\toverlapValidated map[selectionPair]struct{}\n\tmaxDepth int\n}\n\nfunc (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) {\n\tc.addErrMultiLoc([]errors.Location{loc}, rule, format, a...)\n}\n\nfunc (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) {\n\tc.errs = append(c.errs, &errors.QueryError{\n\t\tMessage: fmt.Sprintf(format, a...),\n\t\tLocations: locs,\n\t\tRule: rule,\n\t})\n}\n\ntype opContext struct {\n\t*context\n\tops []*query.Operation\n}\n\nfunc newContext(s *schema.Schema, doc *query.Document, maxDepth int) *context 
{\n\treturn &context{\n\t\tschema: s,\n\t\tdoc: doc,\n\t\topErrs: make(map[*query.Operation][]*errors.QueryError),\n\t\tusedVars: make(map[*query.Operation]varSet),\n\t\tfieldMap: make(map[*query.Field]fieldInfo),\n\t\toverlapValidated: make(map[selectionPair]struct{}),\n\t\tmaxDepth: maxDepth,\n\t}\n}\n\nfunc Validate(s *schema.Schema, doc *query.Document, maxDepth int) []*errors.QueryError {\n\tc := newContext(s, doc, maxDepth)\n\n\topNames := make(nameSet)\n\tfragUsedBy := make(map[*query.FragmentDecl][]*query.Operation)\n\tfor _, op := range doc.Operations {\n\t\tc.usedVars[op] = make(varSet)\n\t\topc := &opContext{c, []*query.Operation{op}}\n\n\t\t\/\/ Check if max depth is exceeded, if it's set. If max depth is exceeded,\n\t\t\/\/ don't continue to validate the document and exit early.\n\t\tif validateMaxDepth(opc, op.Selections, 1) {\n\t\t\treturn c.errs\n\t\t}\n\n\t\tif op.Name.Name == \"\" && len(doc.Operations) != 1 {\n\t\t\tc.addErr(op.Loc, \"LoneAnonymousOperation\", \"This anonymous operation must be the only defined operation.\")\n\t\t}\n\t\tif op.Name.Name != \"\" {\n\t\t\tvalidateName(c, opNames, op.Name, \"UniqueOperationNames\", \"operation\")\n\t\t}\n\n\t\tvalidateDirectives(opc, string(op.Type), op.Directives)\n\n\t\tvarNames := make(nameSet)\n\t\tfor _, v := range op.Vars {\n\t\t\tvalidateName(c, varNames, v.Name, \"UniqueVariableNames\", \"variable\")\n\n\t\t\tt := resolveType(c, v.Type)\n\t\t\tif !canBeInput(t) {\n\t\t\t\tc.addErr(v.TypeLoc, \"VariablesAreInputTypes\", \"Variable %q cannot be non-input type %q.\", \"$\"+v.Name.Name, t)\n\t\t\t}\n\n\t\t\tif v.Default != nil {\n\t\t\t\tvalidateLiteral(opc, v.Default)\n\n\t\t\t\tif t != nil {\n\t\t\t\t\tif nn, ok := t.(*common.NonNull); ok {\n\t\t\t\t\t\tc.addErr(v.Default.Location(), \"DefaultValuesOfCorrectType\", \"Variable %q of type %q is required and will not use the default value. 
Perhaps you meant to use type %q.\", \"$\"+v.Name.Name, t, nn.OfType)\n\t\t\t\t\t}\n\n\t\t\t\t\tif ok, reason := validateValueType(opc, v.Default, t); !ok {\n\t\t\t\t\t\tc.addErr(v.Default.Location(), \"DefaultValuesOfCorrectType\", \"Variable %q of type %q has invalid default value %s.\\n%s\", \"$\"+v.Name.Name, t, v.Default, reason)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tvar entryPoint schema.NamedType\n\t\tswitch op.Type {\n\t\tcase query.Query:\n\t\t\tentryPoint = s.EntryPoints[\"query\"]\n\t\tcase query.Mutation:\n\t\t\tentryPoint = s.EntryPoints[\"mutation\"]\n\t\tcase query.Subscription:\n\t\t\tentryPoint = s.EntryPoints[\"subscription\"]\n\t\tdefault:\n\t\t\tpanic(\"unreachable\")\n\t\t}\n\n\t\tvalidateSelectionSet(opc, op.Selections, entryPoint)\n\n\t\tfragUsed := make(map[*query.FragmentDecl]struct{})\n\t\tmarkUsedFragments(c, op.Selections, fragUsed)\n\t\tfor frag := range fragUsed {\n\t\t\tfragUsedBy[frag] = append(fragUsedBy[frag], op)\n\t\t}\n\t}\n\n\tfragNames := make(nameSet)\n\tfragVisited := make(map[*query.FragmentDecl]struct{})\n\tfor _, frag := range doc.Fragments {\n\t\topc := &opContext{c, fragUsedBy[frag]}\n\n\t\tvalidateName(c, fragNames, frag.Name, \"UniqueFragmentNames\", \"fragment\")\n\t\tvalidateDirectives(opc, \"FRAGMENT_DEFINITION\", frag.Directives)\n\n\t\tt := unwrapType(resolveType(c, &frag.On))\n\t\t\/\/ continue even if t is nil\n\t\tif t != nil && !canBeFragment(t) {\n\t\t\tc.addErr(frag.On.Loc, \"FragmentsOnCompositeTypes\", \"Fragment %q cannot condition on non composite type %q.\", frag.Name.Name, t)\n\t\t\tcontinue\n\t\t}\n\n\t\tvalidateSelectionSet(opc, frag.Selections, t)\n\n\t\tif _, ok := fragVisited[frag]; !ok {\n\t\t\tdetectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0})\n\t\t}\n\t}\n\n\tfor _, frag := range doc.Fragments {\n\t\tif len(fragUsedBy[frag]) == 0 {\n\t\t\tc.addErr(frag.Loc, \"NoUnusedFragments\", \"Fragment %q is never used.\", frag.Name.Name)\n\t\t}\n\t}\n\n\tfor _, op := range doc.Operations {\n\t\tc.errs = append(c.errs, c.opErrs[op]...)\n\n\t\topUsedVars := c.usedVars[op]\n\t\tfor _, v := range op.Vars {\n\t\t\tif _, ok := opUsedVars[v]; !ok {\n\t\t\t\topSuffix := \"\"\n\t\t\t\tif op.Name.Name != \"\" {\n\t\t\t\t\topSuffix = fmt.Sprintf(\" in operation %q\", op.Name.Name)\n\t\t\t\t}\n\t\t\t\tc.addErr(v.Loc, \"NoUnusedVariables\", \"Variable %q is never used%s.\", \"$\"+v.Name.Name, opSuffix)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn c.errs\n}\n\n\/\/ validates the query doesn't go deeper than maxDepth (if set). 
Returns whether\n\/\/ or not query validated max depth to avoid excessive recursion.\nfunc validateMaxDepth(c *opContext, sels []query.Selection, depth int) bool {\n\t\/\/ maxDepth checking is turned off when maxDepth is 0\n\tif c.maxDepth == 0 {\n\t\treturn false\n\t}\n\n\texceededMaxDepth := false\n\n\tfor _, sel := range sels {\n\t\tswitch sel := sel.(type) {\n\t\tcase *query.Field:\n\t\t\tif depth > c.maxDepth {\n\t\t\t\texceededMaxDepth = true\n\t\t\t\tc.addErr(sel.Alias.Loc, \"MaxDepthExceeded\", \"Field %q has depth %d that exceeds max depth %d\", sel.Name.Name, depth, c.maxDepth)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\texceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, sel.Selections, depth+1)\n\t\tcase *query.InlineFragment:\n\t\t\t\/\/ Depth is not checked because inline fragments resolve to other fields which are checked.\n\t\t\t\/\/ Depth is not incremented because inline fragments have the same depth as neighboring fields\n\t\t\texceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, sel.Selections, depth)\n\t\tcase *query.FragmentSpread:\n\t\t\t\/\/ Depth is not checked because fragments resolve to other fields which are checked.\n\t\t\tfrag := c.doc.Fragments.Get(sel.Name.Name)\n\t\t\tif frag == nil {\n\t\t\t\t\/\/ In case of unknown fragment (invalid request), ignore max depth evaluation\n\t\t\t\tc.addErr(sel.Loc, \"MaxDepthEvaluationError\", \"Unknown fragment %q. Unable to evaluate depth.\", sel.Name.Name)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t\/\/ Depth is not incremented because fragments have the same depth as surrounding fields\n\t\t\texceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, frag.Selections, depth)\n\t\t}\n\t}\n\n\treturn exceededMaxDepth\n}\n\nfunc validateSelectionSet(c *opContext, sels []query.Selection, t schema.NamedType) {\n\tfor _, sel := range sels {\n\t\tvalidateSelection(c, sel, t)\n\t}\n\n\tfor i, a := range sels {\n\t\tfor _, b := range sels[i+1:] {\n\t\t\tc.validateOverlap(a, b, nil, nil)\n\t\t}\n\t}\n}\n\nfunc validateSelection(c *opContext, sel query.Selection, t schema.NamedType) {\n\tswitch sel := sel.(type) {\n\tcase *query.Field:\n\t\tvalidateDirectives(c, \"FIELD\", sel.Directives)\n\n\t\tfieldName := sel.Name.Name\n\t\tvar f *schema.Field\n\t\tswitch fieldName {\n\t\tcase \"__typename\":\n\t\t\tf = &schema.Field{\n\t\t\t\tName: \"__typename\",\n\t\t\t\tType: c.schema.Types[\"String\"],\n\t\t\t}\n\t\tcase \"__schema\":\n\t\t\tf = &schema.Field{\n\t\t\t\tName: \"__schema\",\n\t\t\t\tType: c.schema.Types[\"__Schema\"],\n\t\t\t}\n\t\tcase \"__type\":\n\t\t\tf = &schema.Field{\n\t\t\t\tName: \"__type\",\n\t\t\t\tArgs: common.InputValueList{\n\t\t\t\t\t&common.InputValue{\n\t\t\t\t\t\tName: common.Ident{Name: \"name\"},\n\t\t\t\t\t\tType: &common.NonNull{OfType: c.schema.Types[\"String\"]},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tType: c.schema.Types[\"__Type\"],\n\t\t\t}\n\t\tdefault:\n\t\t\tf = fields(t).Get(fieldName)\n\t\t\tif f == nil && t != nil {\n\t\t\t\tsuggestion := makeSuggestion(\"Did you mean\", fields(t).Names(), fieldName)\n\t\t\t\tc.addErr(sel.Alias.Loc, \"FieldsOnCorrectType\", \"Cannot query field %q on type %q.%s\", fieldName, t, suggestion)\n\t\t\t}\n\t\t}\n\t\tc.fieldMap[sel] = fieldInfo{sf: f, parent: t}\n\n\t\tvalidateArgumentLiterals(c, sel.Arguments)\n\t\tif f != nil {\n\t\t\tvalidateArgumentTypes(c, sel.Arguments, f.Args, sel.Alias.Loc,\n\t\t\t\tfunc() string { return fmt.Sprintf(\"field %q of type %q\", fieldName, t) },\n\t\t\t\tfunc() string { return fmt.Sprintf(\"Field %q\", fieldName) 
},\n\t\t\t)\n\t\t}\n\n\t\tvar ft common.Type\n\t\tif f != nil {\n\t\t\tft = f.Type\n\t\t\tsf := hasSubfields(ft)\n\t\t\tif sf && sel.Selections == nil {\n\t\t\t\tc.addErr(sel.Alias.Loc, \"ScalarLeafs\", \"Field %q of type %q must have a selection of subfields. Did you mean \\\"%s { ... }\\\"?\", fieldName, ft, fieldName)\n\t\t\t}\n\t\t\tif !sf && sel.Selections != nil {\n\t\t\t\tc.addErr(sel.SelectionSetLoc, \"ScalarLeafs\", \"Field %q must not have a selection since type %q has no subfields.\", fieldName, ft)\n\t\t\t}\n\t\t}\n\t\tif sel.Selections != nil {\n\t\t\tvalidateSelectionSet(c, sel.Selections, unwrapType(ft))\n\t\t}\n\n\tcase *query.InlineFragment:\n\t\tvalidateDirectives(c, \"INLINE_FRAGMENT\", sel.Directives)\n\t\tif sel.On.Name != \"\" {\n\t\t\tfragTyp := unwrapType(resolveType(c.context, &sel.On))\n\t\t\tif fragTyp != nil && !compatible(t, fragTyp) {\n\t\t\t\tc.addErr(sel.Loc, \"PossibleFragmentSpreads\", \"Fragment cannot be spread here as objects of type %q can never be of type %q.\", t, fragTyp)\n\t\t\t}\n\t\t\tt = fragTyp\n\t\t\t\/\/ continue even if t is nil\n\t\t}\n\t\tif t != nil && !canBeFragment(t) {\n\t\t\tc.addErr(sel.On.Loc, \"FragmentsOnCompositeTypes\", \"Fragment cannot condition on non composite type %q.\", t)\n\t\t\treturn\n\t\t}\n\t\tvalidateSelectionSet(c, sel.Selections, unwrapType(t))\n\n\tcase *query.FragmentSpread:\n\t\tvalidateDirectives(c, \"FRAGMENT_SPREAD\", sel.Directives)\n\t\tfrag := c.doc.Fragments.Get(sel.Name.Name)\n\t\tif frag == nil {\n\t\t\tc.addErr(sel.Name.Loc, \"KnownFragmentNames\", \"Unknown fragment %q.\", sel.Name.Name)\n\t\t\treturn\n\t\t}\n\t\tfragTyp := c.schema.Types[frag.On.Name]\n\t\tif !compatible(t, fragTyp) {\n\t\t\tc.addErr(sel.Loc, \"PossibleFragmentSpreads\", \"Fragment %q cannot be spread here as objects of type %q can never be of type %q.\", frag.Name.Name, t, fragTyp)\n\t\t}\n\n\tdefault:\n\t\tpanic(\"unreachable\")\n\t}\n}\n\nfunc compatible(a, b common.Type) bool {\n\tfor _, pta := range possibleTypes(a) {\n\t\tfor _, ptb := range possibleTypes(b) {\n\t\t\tif pta == ptb {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}\n\nfunc possibleTypes(t common.Type) []*schema.Object {\n\tswitch t := t.(type) {\n\tcase *schema.Object:\n\t\treturn []*schema.Object{t}\n\tcase *schema.Interface:\n\t\treturn t.PossibleTypes\n\tcase *schema.Union:\n\t\treturn t.PossibleTypes\n\tdefault:\n\t\treturn nil\n\t}\n}\n\nfunc markUsedFragments(c *context, sels []query.Selection, fragUsed map[*query.FragmentDecl]struct{}) {\n\tfor _, sel := range sels {\n\t\tswitch sel := sel.(type) {\n\t\tcase *query.Field:\n\t\t\tif sel.Selections != nil {\n\t\t\t\tmarkUsedFragments(c, sel.Selections, fragUsed)\n\t\t\t}\n\n\t\tcase *query.InlineFragment:\n\t\t\tmarkUsedFragments(c, sel.Selections, fragUsed)\n\n\t\tcase *query.FragmentSpread:\n\t\t\tfrag := c.doc.Fragments.Get(sel.Name.Name)\n\t\t\tif frag == nil {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif _, ok := fragUsed[frag]; ok {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfragUsed[frag] = struct{}{}\n\t\t\tmarkUsedFragments(c, frag.Selections, fragUsed)\n\n\t\tdefault:\n\t\t\tpanic(\"unreachable\")\n\t\t}\n\t}\n}\n\nfunc detectFragmentCycle(c *context, sels []query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {\n\tfor _, sel := range sels {\n\t\tdetectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex)\n\t}\n}\n\nfunc detectFragmentCycleSel(c *context, sel query.Selection, fragVisited 
map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {\n\tswitch sel := sel.(type) {\n\tcase *query.Field:\n\t\tif sel.Selections != nil {\n\t\t\tdetectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)\n\t\t}\n\n\tcase *query.InlineFragment:\n\t\tdetectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)\n\n\tcase *query.FragmentSpread:\n\t\tfrag := c.doc.Fragments.Get(sel.Name.Name)\n\t\tif frag == nil {\n\t\t\treturn\n\t\t}\n\n\t\tspreadPath = append(spreadPath, sel)\n\t\tif i, ok := spreadPathIndex[frag.Name.Name]; ok {\n\t\t\tcyclePath := spreadPath[i:]\n\t\t\tvia := \"\"\n\t\t\tif len(cyclePath) > 1 {\n\t\t\t\tnames := make([]string, len(cyclePath)-1)\n\t\t\t\tfor i, frag := range cyclePath[:len(cyclePath)-1] {\n\t\t\t\t\tnames[i] = frag.Name.Name\n\t\t\t\t}\n\t\t\t\tvia = \" via \" + strings.Join(names, \", \")\n\t\t\t}\n\n\t\t\tlocs := make([]errors.Location, len(cyclePath))\n\t\t\tfor i, frag := range cyclePath {\n\t\t\t\tlocs[i] = frag.Loc\n\t\t\t}\n\t\t\tc.addErrMultiLoc(locs, \"NoFragmentCycles\", \"Cannot spread fragment %q within itself%s.\", frag.Name.Name, via)\n\t\t\treturn\n\t\t}\n\n\t\tif _, ok := fragVisited[frag]; ok {\n\t\t\treturn\n\t\t}\n\t\tfragVisited[frag] = struct{}{}\n\n\t\tspreadPathIndex[frag.Name.Name] = len(spreadPath)\n\t\tdetectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex)\n\t\tdelete(spreadPathIndex, frag.Name.Name)\n\n\tdefault:\n\t\tpanic(\"unreachable\")\n\t}\n}\n\nfunc (c *context) validateOverlap(a, b query.Selection, reasons *[]string, locs *[]errors.Location) {\n\tif a == b {\n\t\treturn\n\t}\n\n\tif _, ok := c.overlapValidated[selectionPair{a, b}]; ok {\n\t\treturn\n\t}\n\tc.overlapValidated[selectionPair{a, b}] = struct{}{}\n\tc.overlapValidated[selectionPair{b, a}] = struct{}{}\n\n\tswitch a := a.(type) {\n\tcase *query.Field:\n\t\tswitch b := b.(type) {\n\t\tcase *query.Field:\n\t\t\tif b.Alias.Loc.Before(a.Alias.Loc) {\n\t\t\t\ta, b = b, a\n\t\t\t}\n\t\t\tif reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 {\n\t\t\t\tlocs2 = append(locs2, a.Alias.Loc, b.Alias.Loc)\n\t\t\t\tif reasons == nil {\n\t\t\t\t\tc.addErrMultiLoc(locs2, \"OverlappingFieldsCanBeMerged\", \"Fields %q conflict because %s. 
Use different aliases on the fields to fetch both if this was intentional.\", a.Alias.Name, strings.Join(reasons2, \" and \"))\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tfor _, r := range reasons2 {\n\t\t\t\t\t*reasons = append(*reasons, fmt.Sprintf(\"subfields %q conflict because %s\", a.Alias.Name, r))\n\t\t\t\t}\n\t\t\t\t*locs = append(*locs, locs2...)\n\t\t\t}\n\n\t\tcase *query.InlineFragment:\n\t\t\tfor _, sel := range b.Selections {\n\t\t\t\tc.validateOverlap(a, sel, reasons, locs)\n\t\t\t}\n\n\t\tcase *query.FragmentSpread:\n\t\t\tif frag := c.doc.Fragments.Get(b.Name.Name); frag != nil {\n\t\t\t\tfor _, sel := range frag.Selections {\n\t\t\t\t\tc.validateOverlap(a, sel, reasons, locs)\n\t\t\t\t}\n\t\t\t}\n\n\t\tdefault:\n\t\t\tpanic(\"unreachable\")\n\t\t}\n\n\tcase *query.InlineFragment:\n\t\tfor _, sel := range a.Selections {\n\t\t\tc.validateOverlap(sel, b, reasons, locs)\n\t\t}\n\n\tcase *query.FragmentSpread:\n\t\tif frag := c.doc.Fragments.Get(a.Name.Name); frag != nil {\n\t\t\tfor _, sel := range frag.Selections {\n\t\t\t\tc.validateOverlap(sel, b, reasons, locs)\n\t\t\t}\n\t\t}\n\n\tdefault:\n\t\tpanic(\"unreachable\")\n\t}\n}\n\nfunc (c *context) validateFieldOverlap(a, b *query.Field) ([]string, []errors.Location) {\n\tif a.Alias.Name != b.Alias.Name {\n\t\treturn nil, nil\n\t}\n\n\tif asf := c.fieldMap[a].sf; asf != nil {\n\t\tif bsf := c.fieldMap[b].sf; bsf != nil {\n\t\t\tif !typesCompatible(asf.Type, bsf.Type) {\n\t\t\t\treturn []string{fmt.Sprintf(\"they return conflicting types %s and %s\", asf.Type, bsf.Type)}, nil\n\t\t\t}\n\t\t}\n\t}\n\n\tat := c.fieldMap[a].parent\n\tbt := c.fieldMap[b].parent\n\tif at == nil || bt == nil || at == bt {\n\t\tif a.Name.Name != b.Name.Name {\n\t\t\treturn []string{fmt.Sprintf(\"%s and %s are different fields\", a.Name.Name, b.Name.Name)}, nil\n\t\t}\n\n\t\tif argumentsConflict(a.Arguments, b.Arguments) {\n\t\t\treturn []string{\"they have differing arguments\"}, nil\n\t\t}\n\t}\n\n\tvar reasons []string\n\tvar locs []errors.Location\n\tfor _, a2 := range a.Selections {\n\t\tfor _, b2 := range b.Selections {\n\t\t\tc.validateOverlap(a2, b2, &reasons, &locs)\n\t\t}\n\t}\n\treturn reasons, locs\n}\n\nfunc argumentsConflict(a, b common.ArgumentList) bool {\n\tif len(a) != len(b) {\n\t\treturn true\n\t}\n\tfor _, argA := range a {\n\t\tvalB, ok := b.Get(argA.Name.Name)\n\t\tif !ok || !reflect.DeepEqual(argA.Value.Value(nil), valB.Value(nil)) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\nfunc fields(t common.Type) schema.FieldList {\n\tswitch t := t.(type) {\n\tcase *schema.Object:\n\t\treturn t.Fields\n\tcase *schema.Interface:\n\t\treturn t.Fields\n\tdefault:\n\t\treturn nil\n\t}\n}\n\nfunc unwrapType(t common.Type) schema.NamedType {\n\tif t == nil {\n\t\treturn nil\n\t}\n\tfor {\n\t\tswitch t2 := t.(type) {\n\t\tcase schema.NamedType:\n\t\t\treturn t2\n\t\tcase *common.List:\n\t\t\tt = t2.OfType\n\t\tcase *common.NonNull:\n\t\t\tt = t2.OfType\n\t\tdefault:\n\t\t\tpanic(\"unreachable\")\n\t\t}\n\t}\n}\n\nfunc resolveType(c *context, t common.Type) common.Type {\n\tt2, err := common.ResolveType(t, c.schema.Resolve)\n\tif err != nil {\n\t\tc.errs = append(c.errs, err)\n\t}\n\treturn t2\n}\n\nfunc validateDirectives(c *opContext, loc string, directives common.DirectiveList) {\n\tdirectiveNames := make(nameSet)\n\tfor _, d := range directives {\n\t\tdirName := d.Name.Name\n\t\tvalidateNameCustomMsg(c.context, directiveNames, d.Name, \"UniqueDirectivesPerLocation\", func() string {\n\t\t\treturn fmt.Sprintf(\"The directive %q can only 
be used once at this location.\", dirName)\n\t\t})\n\n\t\tvalidateArgumentLiterals(c, d.Args)\n\n\t\tdd, ok := c.schema.Directives[dirName]\n\t\tif !ok {\n\t\t\tc.addErr(d.Name.Loc, \"KnownDirectives\", \"Unknown directive %q.\", dirName)\n\t\t\tcontinue\n\t\t}\n\n\t\tlocOK := false\n\t\tfor _, allowedLoc := range dd.Locs {\n\t\t\tif loc == allowedLoc {\n\t\t\t\tlocOK = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !locOK {\n\t\t\tc.addErr(d.Name.Loc, \"KnownDirectives\", \"Directive %q may not be used on %s.\", dirName, loc)\n\t\t}\n\n\t\tvalidateArgumentTypes(c, d.Args, dd.Args, d.Name.Loc,\n\t\t\tfunc() string { return fmt.Sprintf(\"directive %q\", \"@\"+dirName) },\n\t\t\tfunc() string { return fmt.Sprintf(\"Directive %q\", \"@\"+dirName) },\n\t\t)\n\t}\n}\n\ntype nameSet map[string]errors.Location\n\nfunc validateName(c *context, set nameSet, name common.Ident, rule string, kind string) {\n\tvalidateNameCustomMsg(c, set, name, rule, func() string {\n\t\treturn fmt.Sprintf(\"There can be only one %s named %q.\", kind, name.Name)\n\t})\n}\n\nfunc validateNameCustomMsg(c *context, set nameSet, name common.Ident, rule string, msg func() string) {\n\tif loc, ok := set[name.Name]; ok {\n\t\tc.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg())\n\t\treturn\n\t}\n\tset[name.Name] = name.Loc\n}\n\nfunc validateArgumentTypes(c *opContext, args common.ArgumentList, argDecls common.InputValueList, loc errors.Location, owner1, owner2 func() string) {\n\tfor _, selArg := range args {\n\t\targ := argDecls.Get(selArg.Name.Name)\n\t\tif arg == nil {\n\t\t\tc.addErr(selArg.Name.Loc, \"KnownArgumentNames\", \"Unknown argument %q on %s.\", selArg.Name.Name, owner1())\n\t\t\tcontinue\n\t\t}\n\t\tvalue := selArg.Value\n\t\tif ok, reason := validateValueType(c, value, arg.Type); !ok {\n\t\t\tc.addErr(value.Location(), \"ArgumentsOfCorrectType\", \"Argument %q has invalid value %s.\\n%s\", arg.Name.Name, value, reason)\n\t\t}\n\t}\n\tfor _, decl := range argDecls {\n\t\tif _, ok := decl.Type.(*common.NonNull); ok {\n\t\t\tif _, ok := args.Get(decl.Name.Name); !ok {\n\t\t\t\tc.addErr(loc, \"ProvidedNonNullArguments\", \"%s argument %q of type %q is required but not provided.\", owner2(), decl.Name.Name, decl.Type)\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc validateArgumentLiterals(c *opContext, args common.ArgumentList) {\n\targNames := make(nameSet)\n\tfor _, arg := range args {\n\t\tvalidateName(c.context, argNames, arg.Name, \"UniqueArgumentNames\", \"argument\")\n\t\tvalidateLiteral(c, arg.Value)\n\t}\n}\n\nfunc validateLiteral(c *opContext, l common.Literal) {\n\tswitch l := l.(type) {\n\tcase *common.ObjectLit:\n\t\tfieldNames := make(nameSet)\n\t\tfor _, f := range l.Fields {\n\t\t\tvalidateName(c.context, fieldNames, f.Name, \"UniqueInputFieldNames\", \"input field\")\n\t\t\tvalidateLiteral(c, f.Value)\n\t\t}\n\tcase *common.ListLit:\n\t\tfor _, entry := range l.Entries {\n\t\t\tvalidateLiteral(c, entry)\n\t\t}\n\tcase *common.Variable:\n\t\tfor _, op := range c.ops {\n\t\t\tv := op.Vars.Get(l.Name)\n\t\t\tif v == nil {\n\t\t\t\tbyOp := \"\"\n\t\t\t\tif op.Name.Name != \"\" {\n\t\t\t\t\tbyOp = fmt.Sprintf(\" by operation %q\", op.Name.Name)\n\t\t\t\t}\n\t\t\t\tc.opErrs[op] = append(c.opErrs[op], &errors.QueryError{\n\t\t\t\t\tMessage: fmt.Sprintf(\"Variable %q is not defined%s.\", \"$\"+l.Name, byOp),\n\t\t\t\t\tLocations: []errors.Location{l.Loc, op.Loc},\n\t\t\t\t\tRule: \"NoUndefinedVariables\",\n\t\t\t\t})\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tc.usedVars[op][v] = struct{}{}\n\t\t}\n\t}\n}\n\nfunc 
validateValueType(c *opContext, v common.Literal, t common.Type) (bool, string) {\n\tif v, ok := v.(*common.Variable); ok {\n\t\tfor _, op := range c.ops {\n\t\t\tif v2 := op.Vars.Get(v.Name); v2 != nil {\n\t\t\t\tt2, err := common.ResolveType(v2.Type, c.schema.Resolve)\n\t\t\t\tif _, ok := t2.(*common.NonNull); !ok && v2.Default != nil {\n\t\t\t\t\tt2 = &common.NonNull{OfType: t2}\n\t\t\t\t}\n\t\t\t\tif err == nil && !typeCanBeUsedAs(t2, t) {\n\t\t\t\t\tc.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, \"VariablesInAllowedPosition\", \"Variable %q of type %q used in position expecting type %q.\", \"$\"+v.Name, t2, t)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn true, \"\"\n\t}\n\n\tif nn, ok := t.(*common.NonNull); ok {\n\t\tif isNull(v) {\n\t\t\treturn false, fmt.Sprintf(\"Expected %q, found null.\", t)\n\t\t}\n\t\tt = nn.OfType\n\t}\n\tif isNull(v) {\n\t\treturn true, \"\"\n\t}\n\n\tswitch t := t.(type) {\n\tcase *schema.Scalar, *schema.Enum:\n\t\tif lit, ok := v.(*common.BasicLit); ok {\n\t\t\tif validateBasicLit(lit, t) {\n\t\t\t\treturn true, \"\"\n\t\t\t}\n\t\t}\n\n\tcase *common.List:\n\t\tlist, ok := v.(*common.ListLit)\n\t\tif !ok {\n\t\t\treturn validateValueType(c, v, t.OfType) \/\/ single value instead of list\n\t\t}\n\t\tfor i, entry := range list.Entries {\n\t\t\tif ok, reason := validateValueType(c, entry, t.OfType); !ok {\n\t\t\t\treturn false, fmt.Sprintf(\"In element #%d: %s\", i, reason)\n\t\t\t}\n\t\t}\n\t\treturn true, \"\"\n\n\tcase *schema.InputObject:\n\t\tv, ok := v.(*common.ObjectLit)\n\t\tif !ok {\n\t\t\treturn false, fmt.Sprintf(\"Expected %q, found not an object.\", t)\n\t\t}\n\t\tfor _, f := range v.Fields {\n\t\t\tname := f.Name.Name\n\t\t\tiv := t.Values.Get(name)\n\t\t\tif iv == nil {\n\t\t\t\treturn false, fmt.Sprintf(\"In field %q: Unknown field.\", name)\n\t\t\t}\n\t\t\tif ok, reason := validateValueType(c, f.Value, iv.Type); !ok {\n\t\t\t\treturn false, fmt.Sprintf(\"In field %q: %s\", name, reason)\n\t\t\t}\n\t\t}\n\t\tfor _, iv := range t.Values {\n\t\t\tfound := false\n\t\t\tfor _, f := range v.Fields {\n\t\t\t\tif f.Name.Name == iv.Name.Name {\n\t\t\t\t\tfound = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !found {\n\t\t\t\tif _, ok := iv.Type.(*common.NonNull); ok && iv.Default == nil {\n\t\t\t\t\treturn false, fmt.Sprintf(\"In field %q: Expected %q, found null.\", iv.Name.Name, iv.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn true, \"\"\n\t}\n\n\treturn false, fmt.Sprintf(\"Expected type %q, found %s.\", t, v)\n}\n\nfunc validateBasicLit(v *common.BasicLit, t common.Type) bool {\n\tswitch t := t.(type) {\n\tcase *schema.Scalar:\n\t\tswitch t.Name {\n\t\tcase \"Int\":\n\t\t\tif v.Type != scanner.Int {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tf, err := strconv.ParseFloat(v.Text, 64)\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\treturn f >= math.MinInt32 && f <= math.MaxInt32\n\t\tcase \"Float\":\n\t\t\treturn v.Type == scanner.Int || v.Type == scanner.Float\n\t\tcase \"String\":\n\t\t\treturn v.Type == scanner.String\n\t\tcase \"Boolean\":\n\t\t\treturn v.Type == scanner.Ident && (v.Text == \"true\" || v.Text == \"false\")\n\t\tcase \"ID\":\n\t\t\treturn v.Type == scanner.Int || v.Type == scanner.String\n\t\tdefault:\n\t\t\t\/\/TODO: Type-check against expected type by Unmarshalling\n\t\t\treturn true\n\t\t}\n\n\tcase *schema.Enum:\n\t\tif v.Type != scanner.Ident {\n\t\t\treturn false\n\t\t}\n\t\tfor _, option := range t.Values {\n\t\t\tif option.Name == v.Text {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\treturn 
false\n}\n\nfunc canBeFragment(t common.Type) bool {\n\tswitch t.(type) {\n\tcase *schema.Object, *schema.Interface, *schema.Union:\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc canBeInput(t common.Type) bool {\n\tswitch t := t.(type) {\n\tcase *schema.InputObject, *schema.Scalar, *schema.Enum:\n\t\treturn true\n\tcase *common.List:\n\t\treturn canBeInput(t.OfType)\n\tcase *common.NonNull:\n\t\treturn canBeInput(t.OfType)\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc hasSubfields(t common.Type) bool {\n\tswitch t := t.(type) {\n\tcase *schema.Object, *schema.Interface, *schema.Union:\n\t\treturn true\n\tcase *common.List:\n\t\treturn hasSubfields(t.OfType)\n\tcase *common.NonNull:\n\t\treturn hasSubfields(t.OfType)\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc isLeaf(t common.Type) bool {\n\tswitch t.(type) {\n\tcase *schema.Scalar, *schema.Enum:\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc isNull(lit interface{}) bool {\n\t_, ok := lit.(*common.NullLit)\n\treturn ok\n}\n\nfunc typesCompatible(a, b common.Type) bool {\n\tal, aIsList := a.(*common.List)\n\tbl, bIsList := b.(*common.List)\n\tif aIsList || bIsList {\n\t\treturn aIsList && bIsList && typesCompatible(al.OfType, bl.OfType)\n\t}\n\n\tann, aIsNN := a.(*common.NonNull)\n\tbnn, bIsNN := b.(*common.NonNull)\n\tif aIsNN || bIsNN {\n\t\treturn aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType)\n\t}\n\n\tif isLeaf(a) || isLeaf(b) {\n\t\treturn a == b\n\t}\n\n\treturn true\n}\n\nfunc typeCanBeUsedAs(t, as common.Type) bool {\n\tnnT, okT := t.(*common.NonNull)\n\tif okT {\n\t\tt = nnT.OfType\n\t}\n\n\tnnAs, okAs := as.(*common.NonNull)\n\tif okAs {\n\t\tas = nnAs.OfType\n\t\tif !okT {\n\t\t\treturn false \/\/ nullable can not be used as non-null\n\t\t}\n\t}\n\n\tif t == as {\n\t\treturn true\n\t}\n\n\tif lT, ok := t.(*common.List); ok {\n\t\tif lAs, ok := as.(*common.List); ok {\n\t\t\treturn typeCanBeUsedAs(lT.OfType, lAs.OfType)\n\t\t}\n\t}\n\treturn false\n}\n","avg_line_length":27.5615384615,"max_line_length":216,"alphanum_fraction":0.65611419} +{"size":64401,"ext":"go","lang":"Go","max_stars_count":252.0,"content":"\/\/ Copyright 2020 Google LLC\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/ Code generated by protoc-gen-go. 
DO NOT EDIT.\n\/\/ versions:\n\/\/ \tprotoc-gen-go v1.26.0\n\/\/ \tprotoc v3.12.2\n\/\/ source: google\/cloud\/recommender\/v1beta1\/recommendation.proto\n\npackage recommender\n\nimport (\n\treflect \"reflect\"\n\tsync \"sync\"\n\n\t_ \"google.golang.org\/genproto\/googleapis\/api\/annotations\"\n\tmoney \"google.golang.org\/genproto\/googleapis\/type\/money\"\n\tprotoreflect \"google.golang.org\/protobuf\/reflect\/protoreflect\"\n\tprotoimpl \"google.golang.org\/protobuf\/runtime\/protoimpl\"\n\tdurationpb \"google.golang.org\/protobuf\/types\/known\/durationpb\"\n\tstructpb \"google.golang.org\/protobuf\/types\/known\/structpb\"\n\ttimestamppb \"google.golang.org\/protobuf\/types\/known\/timestamppb\"\n)\n\nconst (\n\t\/\/ Verify that this generated code is sufficiently up-to-date.\n\t_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)\n\t\/\/ Verify that runtime\/protoimpl is sufficiently up-to-date.\n\t_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)\n)\n\n\/\/ The category of the impact.\ntype Impact_Category int32\n\nconst (\n\t\/\/ Default unspecified category. Don't use directly.\n\tImpact_CATEGORY_UNSPECIFIED Impact_Category = 0\n\t\/\/ Indicates a potential increase or decrease in cost.\n\tImpact_COST Impact_Category = 1\n\t\/\/ Indicates a potential increase or decrease in security.\n\tImpact_SECURITY Impact_Category = 2\n\t\/\/ Indicates a potential increase or decrease in performance.\n\tImpact_PERFORMANCE Impact_Category = 3\n\t\/\/ Indicates a potential increase or decrease in manageability.\n\tImpact_MANAGEABILITY Impact_Category = 4\n)\n\n\/\/ Enum value maps for Impact_Category.\nvar (\n\tImpact_Category_name = map[int32]string{\n\t\t0: \"CATEGORY_UNSPECIFIED\",\n\t\t1: \"COST\",\n\t\t2: \"SECURITY\",\n\t\t3: \"PERFORMANCE\",\n\t\t4: \"MANAGEABILITY\",\n\t}\n\tImpact_Category_value = map[string]int32{\n\t\t\"CATEGORY_UNSPECIFIED\": 0,\n\t\t\"COST\": 1,\n\t\t\"SECURITY\": 2,\n\t\t\"PERFORMANCE\": 3,\n\t\t\"MANAGEABILITY\": 4,\n\t}\n)\n\nfunc (x Impact_Category) Enum() *Impact_Category {\n\tp := new(Impact_Category)\n\t*p = x\n\treturn p\n}\n\nfunc (x Impact_Category) String() string {\n\treturn protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))\n}\n\nfunc (Impact_Category) Descriptor() protoreflect.EnumDescriptor {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes[0].Descriptor()\n}\n\nfunc (Impact_Category) Type() protoreflect.EnumType {\n\treturn &file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes[0]\n}\n\nfunc (x Impact_Category) Number() protoreflect.EnumNumber {\n\treturn protoreflect.EnumNumber(x)\n}\n\n\/\/ Deprecated: Use Impact_Category.Descriptor instead.\nfunc (Impact_Category) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{6, 0}\n}\n\n\/\/ Represents Recommendation State.\ntype RecommendationStateInfo_State int32\n\nconst (\n\t\/\/ Default state. Don't use directly.\n\tRecommendationStateInfo_STATE_UNSPECIFIED RecommendationStateInfo_State = 0\n\t\/\/ Recommendation is active and can be applied. Recommendations content can\n\t\/\/ be updated by Google.\n\t\/\/\n\t\/\/ ACTIVE recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED.\n\tRecommendationStateInfo_ACTIVE RecommendationStateInfo_State = 1\n\t\/\/ Recommendation is in claimed state. 
Recommendations content is\n\t\/\/ immutable and cannot be updated by Google.\n\t\/\/\n\t\/\/ CLAIMED recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED.\n\tRecommendationStateInfo_CLAIMED RecommendationStateInfo_State = 6\n\t\/\/ Recommendation is in succeeded state. Recommendations content is\n\t\/\/ immutable and cannot be updated by Google.\n\t\/\/\n\t\/\/ SUCCEEDED recommendations can be marked as SUCCEEDED, or FAILED.\n\tRecommendationStateInfo_SUCCEEDED RecommendationStateInfo_State = 3\n\t\/\/ Recommendation is in failed state. Recommendations content is immutable\n\t\/\/ and cannot be updated by Google.\n\t\/\/\n\t\/\/ FAILED recommendations can be marked as SUCCEEDED, or FAILED.\n\tRecommendationStateInfo_FAILED RecommendationStateInfo_State = 4\n\t\/\/ Recommendation is in dismissed state. Recommendation content can be\n\t\/\/ updated by Google.\n\t\/\/\n\t\/\/ DISMISSED recommendations can be marked as ACTIVE.\n\tRecommendationStateInfo_DISMISSED RecommendationStateInfo_State = 5\n)\n\n\/\/ Enum value maps for RecommendationStateInfo_State.\nvar (\n\tRecommendationStateInfo_State_name = map[int32]string{\n\t\t0: \"STATE_UNSPECIFIED\",\n\t\t1: \"ACTIVE\",\n\t\t6: \"CLAIMED\",\n\t\t3: \"SUCCEEDED\",\n\t\t4: \"FAILED\",\n\t\t5: \"DISMISSED\",\n\t}\n\tRecommendationStateInfo_State_value = map[string]int32{\n\t\t\"STATE_UNSPECIFIED\": 0,\n\t\t\"ACTIVE\": 1,\n\t\t\"CLAIMED\": 6,\n\t\t\"SUCCEEDED\": 3,\n\t\t\"FAILED\": 4,\n\t\t\"DISMISSED\": 5,\n\t}\n)\n\nfunc (x RecommendationStateInfo_State) Enum() *RecommendationStateInfo_State {\n\tp := new(RecommendationStateInfo_State)\n\t*p = x\n\treturn p\n}\n\nfunc (x RecommendationStateInfo_State) String() string {\n\treturn protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))\n}\n\nfunc (RecommendationStateInfo_State) Descriptor() protoreflect.EnumDescriptor {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes[1].Descriptor()\n}\n\nfunc (RecommendationStateInfo_State) Type() protoreflect.EnumType {\n\treturn &file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes[1]\n}\n\nfunc (x RecommendationStateInfo_State) Number() protoreflect.EnumNumber {\n\treturn protoreflect.EnumNumber(x)\n}\n\n\/\/ Deprecated: Use RecommendationStateInfo_State.Descriptor instead.\nfunc (RecommendationStateInfo_State) EnumDescriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{7, 0}\n}\n\n\/\/ A recommendation along with a suggested action. E.g., a rightsizing\n\/\/ recommendation for an underutilized VM, IAM role recommendations, etc\ntype Recommendation struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Name of recommendation.\n\tName string `protobuf:\"bytes,1,opt,name=name,proto3\" json:\"name,omitempty\"`\n\t\/\/ Free-form human readable summary in English. The maximum length is 500\n\t\/\/ characters.\n\tDescription string `protobuf:\"bytes,2,opt,name=description,proto3\" json:\"description,omitempty\"`\n\t\/\/ Contains an identifier for a subtype of recommendations produced for the\n\t\/\/ same recommender. Subtype is a function of content and impact, meaning a\n\t\/\/ new subtype might be added when significant changes to `content` or\n\t\/\/ `primary_impact.category` are introduced. 
See the Recommenders section\n\t\/\/ to see a list of subtypes for a given Recommender.\n\t\/\/\n\t\/\/ Examples:\n\t\/\/ For recommender = \"google.iam.policy.Recommender\",\n\t\/\/ recommender_subtype can be one of \"REMOVE_ROLE\"\/\"REPLACE_ROLE\"\n\tRecommenderSubtype string `protobuf:\"bytes,12,opt,name=recommender_subtype,json=recommenderSubtype,proto3\" json:\"recommender_subtype,omitempty\"`\n\t\/\/ Last time this recommendation was refreshed by the system that created it\n\t\/\/ in the first place.\n\tLastRefreshTime *timestamppb.Timestamp `protobuf:\"bytes,4,opt,name=last_refresh_time,json=lastRefreshTime,proto3\" json:\"last_refresh_time,omitempty\"`\n\t\/\/ The primary impact that this recommendation can have while trying to\n\t\/\/ optimize for one category.\n\tPrimaryImpact *Impact `protobuf:\"bytes,5,opt,name=primary_impact,json=primaryImpact,proto3\" json:\"primary_impact,omitempty\"`\n\t\/\/ Optional set of additional impact that this recommendation may have when\n\t\/\/ trying to optimize for the primary category. These may be positive\n\t\/\/ or negative.\n\tAdditionalImpact []*Impact `protobuf:\"bytes,6,rep,name=additional_impact,json=additionalImpact,proto3\" json:\"additional_impact,omitempty\"`\n\t\/\/ Content of the recommendation describing recommended changes to resources.\n\tContent *RecommendationContent `protobuf:\"bytes,7,opt,name=content,proto3\" json:\"content,omitempty\"`\n\t\/\/ Information for state. Contains state and metadata.\n\tStateInfo *RecommendationStateInfo `protobuf:\"bytes,10,opt,name=state_info,json=stateInfo,proto3\" json:\"state_info,omitempty\"`\n\t\/\/ Fingerprint of the Recommendation. Provides optimistic locking when\n\t\/\/ updating states.\n\tEtag string `protobuf:\"bytes,11,opt,name=etag,proto3\" json:\"etag,omitempty\"`\n\t\/\/ Insights that led to this recommendation.\n\tAssociatedInsights []*Recommendation_InsightReference `protobuf:\"bytes,14,rep,name=associated_insights,json=associatedInsights,proto3\" json:\"associated_insights,omitempty\"`\n}\n\nfunc (x *Recommendation) Reset() {\n\t*x = Recommendation{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[0]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *Recommendation) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*Recommendation) ProtoMessage() {}\n\nfunc (x *Recommendation) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[0]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use Recommendation.ProtoReflect.Descriptor instead.\nfunc (*Recommendation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{0}\n}\n\nfunc (x *Recommendation) GetName() string {\n\tif x != nil {\n\t\treturn x.Name\n\t}\n\treturn \"\"\n}\n\nfunc (x *Recommendation) GetDescription() string {\n\tif x != nil {\n\t\treturn x.Description\n\t}\n\treturn \"\"\n}\n\nfunc (x *Recommendation) GetRecommenderSubtype() string {\n\tif x != nil {\n\t\treturn x.RecommenderSubtype\n\t}\n\treturn \"\"\n}\n\nfunc (x *Recommendation) GetLastRefreshTime() *timestamppb.Timestamp {\n\tif x != nil {\n\t\treturn x.LastRefreshTime\n\t}\n\treturn 
nil\n}\n\nfunc (x *Recommendation) GetPrimaryImpact() *Impact {\n\tif x != nil {\n\t\treturn x.PrimaryImpact\n\t}\n\treturn nil\n}\n\nfunc (x *Recommendation) GetAdditionalImpact() []*Impact {\n\tif x != nil {\n\t\treturn x.AdditionalImpact\n\t}\n\treturn nil\n}\n\nfunc (x *Recommendation) GetContent() *RecommendationContent {\n\tif x != nil {\n\t\treturn x.Content\n\t}\n\treturn nil\n}\n\nfunc (x *Recommendation) GetStateInfo() *RecommendationStateInfo {\n\tif x != nil {\n\t\treturn x.StateInfo\n\t}\n\treturn nil\n}\n\nfunc (x *Recommendation) GetEtag() string {\n\tif x != nil {\n\t\treturn x.Etag\n\t}\n\treturn \"\"\n}\n\nfunc (x *Recommendation) GetAssociatedInsights() []*Recommendation_InsightReference {\n\tif x != nil {\n\t\treturn x.AssociatedInsights\n\t}\n\treturn nil\n}\n\n\/\/ Contains what resources are changing and how they are changing.\ntype RecommendationContent struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Operations to one or more Google Cloud resources grouped in such a way\n\t\/\/ that, all operations within one group are expected to be performed\n\t\/\/ atomically and in an order.\n\tOperationGroups []*OperationGroup `protobuf:\"bytes,2,rep,name=operation_groups,json=operationGroups,proto3\" json:\"operation_groups,omitempty\"`\n}\n\nfunc (x *RecommendationContent) Reset() {\n\t*x = RecommendationContent{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[1]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *RecommendationContent) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*RecommendationContent) ProtoMessage() {}\n\nfunc (x *RecommendationContent) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[1]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use RecommendationContent.ProtoReflect.Descriptor instead.\nfunc (*RecommendationContent) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{1}\n}\n\nfunc (x *RecommendationContent) GetOperationGroups() []*OperationGroup {\n\tif x != nil {\n\t\treturn x.OperationGroups\n\t}\n\treturn nil\n}\n\n\/\/ Group of operations that need to be performed atomically.\ntype OperationGroup struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ List of operations across one or more resources that belong to this group.\n\t\/\/ Loosely based on RFC6902 and should be performed in the order they appear.\n\tOperations []*Operation `protobuf:\"bytes,1,rep,name=operations,proto3\" json:\"operations,omitempty\"`\n}\n\nfunc (x *OperationGroup) Reset() {\n\t*x = OperationGroup{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[2]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *OperationGroup) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*OperationGroup) ProtoMessage() {}\n\nfunc (x *OperationGroup) ProtoReflect() protoreflect.Message {\n\tmi := 
&file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[2]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use OperationGroup.ProtoReflect.Descriptor instead.\nfunc (*OperationGroup) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{2}\n}\n\nfunc (x *OperationGroup) GetOperations() []*Operation {\n\tif x != nil {\n\t\treturn x.Operations\n\t}\n\treturn nil\n}\n\n\/\/ Contains an operation for a resource loosely based on the JSON-PATCH format\n\/\/ with support for:\n\/\/\n\/\/ * Custom filters for describing partial array patch.\n\/\/ * Extended path values for describing nested arrays.\n\/\/ * Custom fields for describing the resource for which the operation is being\n\/\/ described.\n\/\/ * Allows extension to custom operations not natively supported by RFC6902.\n\/\/ See https:\/\/tools.ietf.org\/html\/rfc6902 for details on the original RFC.\ntype Operation struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Type of this operation. Contains one of 'and', 'remove', 'replace', 'move',\n\t\/\/ 'copy', 'test' and 'custom' operations. This field is case-insensitive and\n\t\/\/ always populated.\n\tAction string `protobuf:\"bytes,1,opt,name=action,proto3\" json:\"action,omitempty\"`\n\t\/\/ Type of GCP resource being modified\/tested. This field is always populated.\n\t\/\/ Example: cloudresourcemanager.googleapis.com\/Project,\n\t\/\/ compute.googleapis.com\/Instance\n\tResourceType string `protobuf:\"bytes,2,opt,name=resource_type,json=resourceType,proto3\" json:\"resource_type,omitempty\"`\n\t\/\/ Contains the fully qualified resource name. This field is always populated.\n\t\/\/ ex: \/\/cloudresourcemanager.googleapis.com\/projects\/foo.\n\tResource string `protobuf:\"bytes,3,opt,name=resource,proto3\" json:\"resource,omitempty\"`\n\t\/\/ Path to the target field being operated on. If the operation is at the\n\t\/\/ resource level, then path should be \"\/\". This field is always populated.\n\tPath string `protobuf:\"bytes,4,opt,name=path,proto3\" json:\"path,omitempty\"`\n\t\/\/ Can be set with action 'copy' to copy resource configuration across\n\t\/\/ different resources of the same type. 
Example: A resource clone can be\n\t\/\/ done via action = 'copy', path = \"\/\", from = \"\/\",\n\t\/\/ source_resource = and resource_name = .\n\t\/\/ This field is empty for all other values of `action`.\n\tSourceResource string `protobuf:\"bytes,5,opt,name=source_resource,json=sourceResource,proto3\" json:\"source_resource,omitempty\"`\n\t\/\/ Can be set with action 'copy' or 'move' to indicate the source field within\n\t\/\/ resource or source_resource, ignored if provided for other operation types.\n\tSourcePath string `protobuf:\"bytes,6,opt,name=source_path,json=sourcePath,proto3\" json:\"source_path,omitempty\"`\n\t\/\/ One of the fields in the following block will be set and intend to\n\t\/\/ describe a value for 'path' field.\n\t\/\/\n\t\/\/ Types that are assignable to PathValue:\n\t\/\/\t*Operation_Value\n\t\/\/\t*Operation_ValueMatcher\n\tPathValue isOperation_PathValue `protobuf_oneof:\"path_value\"`\n\t\/\/ Set of filters to apply if `path` refers to array elements or nested array\n\t\/\/ elements in order to narrow down to a single unique element that is being\n\t\/\/ tested\/modified.\n\t\/\/ This is intended to be an exact match per filter. To perform advanced\n\t\/\/ matching, use path_value_matchers.\n\t\/\/\n\t\/\/ * Example: {\n\t\/\/ \"\/versions\/*\/name\" : \"it-123\"\n\t\/\/ \"\/versions\/*\/targetSize\/percent\": 20\n\t\/\/ }\n\t\/\/ * Example: {\n\t\/\/ \"\/bindings\/*\/role\": \"roles\/owner\"\n\t\/\/ \"\/bindings\/*\/condition\" : null\n\t\/\/ }\n\t\/\/ * Example: {\n\t\/\/ \"\/bindings\/*\/role\": \"roles\/owner\"\n\t\/\/ \"\/bindings\/*\/members\/*\" : [\"x@example.com\", \"y@example.com\"]\n\t\/\/ }\n\t\/\/ When both path_filters and path_value_matchers are set, an implicit AND\n\t\/\/ must be performed.\n\tPathFilters map[string]*structpb.Value `protobuf:\"bytes,8,rep,name=path_filters,json=pathFilters,proto3\" json:\"path_filters,omitempty\" protobuf_key:\"bytes,1,opt,name=key,proto3\" protobuf_val:\"bytes,2,opt,name=value,proto3\"`\n\t\/\/ Similar to path_filters, this contains set of filters to apply if `path`\n\t\/\/ field referes to array elements. This is meant to support value matching\n\t\/\/ beyond exact match. 
To perform exact match, use path_filters.\n\t\/\/ When both path_filters and path_value_matchers are set, an implicit AND\n\t\/\/ must be performed.\n\tPathValueMatchers map[string]*ValueMatcher `protobuf:\"bytes,11,rep,name=path_value_matchers,json=pathValueMatchers,proto3\" json:\"path_value_matchers,omitempty\" protobuf_key:\"bytes,1,opt,name=key,proto3\" protobuf_val:\"bytes,2,opt,name=value,proto3\"`\n}\n\nfunc (x *Operation) Reset() {\n\t*x = Operation{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[3]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *Operation) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*Operation) ProtoMessage() {}\n\nfunc (x *Operation) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[3]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use Operation.ProtoReflect.Descriptor instead.\nfunc (*Operation) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{3}\n}\n\nfunc (x *Operation) GetAction() string {\n\tif x != nil {\n\t\treturn x.Action\n\t}\n\treturn \"\"\n}\n\nfunc (x *Operation) GetResourceType() string {\n\tif x != nil {\n\t\treturn x.ResourceType\n\t}\n\treturn \"\"\n}\n\nfunc (x *Operation) GetResource() string {\n\tif x != nil {\n\t\treturn x.Resource\n\t}\n\treturn \"\"\n}\n\nfunc (x *Operation) GetPath() string {\n\tif x != nil {\n\t\treturn x.Path\n\t}\n\treturn \"\"\n}\n\nfunc (x *Operation) GetSourceResource() string {\n\tif x != nil {\n\t\treturn x.SourceResource\n\t}\n\treturn \"\"\n}\n\nfunc (x *Operation) GetSourcePath() string {\n\tif x != nil {\n\t\treturn x.SourcePath\n\t}\n\treturn \"\"\n}\n\nfunc (m *Operation) GetPathValue() isOperation_PathValue {\n\tif m != nil {\n\t\treturn m.PathValue\n\t}\n\treturn nil\n}\n\nfunc (x *Operation) GetValue() *structpb.Value {\n\tif x, ok := x.GetPathValue().(*Operation_Value); ok {\n\t\treturn x.Value\n\t}\n\treturn nil\n}\n\nfunc (x *Operation) GetValueMatcher() *ValueMatcher {\n\tif x, ok := x.GetPathValue().(*Operation_ValueMatcher); ok {\n\t\treturn x.ValueMatcher\n\t}\n\treturn nil\n}\n\nfunc (x *Operation) GetPathFilters() map[string]*structpb.Value {\n\tif x != nil {\n\t\treturn x.PathFilters\n\t}\n\treturn nil\n}\n\nfunc (x *Operation) GetPathValueMatchers() map[string]*ValueMatcher {\n\tif x != nil {\n\t\treturn x.PathValueMatchers\n\t}\n\treturn nil\n}\n\ntype isOperation_PathValue interface {\n\tisOperation_PathValue()\n}\n\ntype Operation_Value struct {\n\t\/\/ Value for the `path` field. Will be set for actions:'add'\/'replace'.\n\t\/\/ Maybe set for action: 'test'. Either this or `value_matcher` will be set\n\t\/\/ for 'test' operation. An exact match must be performed.\n\tValue *structpb.Value `protobuf:\"bytes,7,opt,name=value,proto3,oneof\"`\n}\n\ntype Operation_ValueMatcher struct {\n\t\/\/ Can be set for action 'test' for advanced matching for the value of\n\t\/\/ 'path' field. 
Either this or `value` will be set for 'test' operation.\n\tValueMatcher *ValueMatcher `protobuf:\"bytes,10,opt,name=value_matcher,json=valueMatcher,proto3,oneof\"`\n}\n\nfunc (*Operation_Value) isOperation_PathValue() {}\n\nfunc (*Operation_ValueMatcher) isOperation_PathValue() {}\n\n\/\/ Contains various matching options for values for a GCP resource field.\ntype ValueMatcher struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Types that are assignable to MatchVariant:\n\t\/\/\t*ValueMatcher_MatchesPattern\n\tMatchVariant isValueMatcher_MatchVariant `protobuf_oneof:\"match_variant\"`\n}\n\nfunc (x *ValueMatcher) Reset() {\n\t*x = ValueMatcher{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[4]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *ValueMatcher) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*ValueMatcher) ProtoMessage() {}\n\nfunc (x *ValueMatcher) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[4]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use ValueMatcher.ProtoReflect.Descriptor instead.\nfunc (*ValueMatcher) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{4}\n}\n\nfunc (m *ValueMatcher) GetMatchVariant() isValueMatcher_MatchVariant {\n\tif m != nil {\n\t\treturn m.MatchVariant\n\t}\n\treturn nil\n}\n\nfunc (x *ValueMatcher) GetMatchesPattern() string {\n\tif x, ok := x.GetMatchVariant().(*ValueMatcher_MatchesPattern); ok {\n\t\treturn x.MatchesPattern\n\t}\n\treturn \"\"\n}\n\ntype isValueMatcher_MatchVariant interface {\n\tisValueMatcher_MatchVariant()\n}\n\ntype ValueMatcher_MatchesPattern struct {\n\t\/\/ To be used for full regex matching. The regular expression is using the\n\t\/\/ Google RE2 syntax (https:\/\/github.com\/google\/re2\/wiki\/Syntax), so to be\n\t\/\/ used with RE2::FullMatch\n\tMatchesPattern string `protobuf:\"bytes,1,opt,name=matches_pattern,json=matchesPattern,proto3,oneof\"`\n}\n\nfunc (*ValueMatcher_MatchesPattern) isValueMatcher_MatchVariant() {}\n\n\/\/ Contains metadata about how much money a recommendation can save or incur.\ntype CostProjection struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ An approximate projection on amount saved or amount incurred. 
Negative cost\n\t\/\/ units indicate cost savings and positive cost units indicate increase.\n\t\/\/ See google.type.Money documentation for positive\/negative units.\n\tCost *money.Money `protobuf:\"bytes,1,opt,name=cost,proto3\" json:\"cost,omitempty\"`\n\t\/\/ Duration for which this cost applies.\n\tDuration *durationpb.Duration `protobuf:\"bytes,2,opt,name=duration,proto3\" json:\"duration,omitempty\"`\n}\n\nfunc (x *CostProjection) Reset() {\n\t*x = CostProjection{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[5]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *CostProjection) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*CostProjection) ProtoMessage() {}\n\nfunc (x *CostProjection) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[5]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use CostProjection.ProtoReflect.Descriptor instead.\nfunc (*CostProjection) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{5}\n}\n\nfunc (x *CostProjection) GetCost() *money.Money {\n\tif x != nil {\n\t\treturn x.Cost\n\t}\n\treturn nil\n}\n\nfunc (x *CostProjection) GetDuration() *durationpb.Duration {\n\tif x != nil {\n\t\treturn x.Duration\n\t}\n\treturn nil\n}\n\n\/\/ Contains the impact a recommendation can have for a given category.\ntype Impact struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Category that is being targeted.\n\tCategory Impact_Category `protobuf:\"varint,1,opt,name=category,proto3,enum=google.cloud.recommender.v1beta1.Impact_Category\" json:\"category,omitempty\"`\n\t\/\/ Contains projections (if any) for this category.\n\t\/\/\n\t\/\/ Types that are assignable to Projection:\n\t\/\/\t*Impact_CostProjection\n\tProjection isImpact_Projection `protobuf_oneof:\"projection\"`\n}\n\nfunc (x *Impact) Reset() {\n\t*x = Impact{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[6]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *Impact) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*Impact) ProtoMessage() {}\n\nfunc (x *Impact) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[6]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use Impact.ProtoReflect.Descriptor instead.\nfunc (*Impact) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{6}\n}\n\nfunc (x *Impact) GetCategory() Impact_Category {\n\tif x != nil {\n\t\treturn x.Category\n\t}\n\treturn Impact_CATEGORY_UNSPECIFIED\n}\n\nfunc (m *Impact) GetProjection() isImpact_Projection {\n\tif m != nil {\n\t\treturn m.Projection\n\t}\n\treturn nil\n}\n\nfunc (x *Impact) GetCostProjection() *CostProjection 
{\n\tif x, ok := x.GetProjection().(*Impact_CostProjection); ok {\n\t\treturn x.CostProjection\n\t}\n\treturn nil\n}\n\ntype isImpact_Projection interface {\n\tisImpact_Projection()\n}\n\ntype Impact_CostProjection struct {\n\t\/\/ Use with CategoryType.COST\n\tCostProjection *CostProjection `protobuf:\"bytes,100,opt,name=cost_projection,json=costProjection,proto3,oneof\"`\n}\n\nfunc (*Impact_CostProjection) isImpact_Projection() {}\n\n\/\/ Information for state. Contains state and metadata.\ntype RecommendationStateInfo struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ The state of the recommendation, Eg ACTIVE, SUCCEEDED, FAILED.\n\tState RecommendationStateInfo_State `protobuf:\"varint,1,opt,name=state,proto3,enum=google.cloud.recommender.v1beta1.RecommendationStateInfo_State\" json:\"state,omitempty\"`\n\t\/\/ A map of metadata for the state, provided by user or automations systems.\n\tStateMetadata map[string]string `protobuf:\"bytes,2,rep,name=state_metadata,json=stateMetadata,proto3\" json:\"state_metadata,omitempty\" protobuf_key:\"bytes,1,opt,name=key,proto3\" protobuf_val:\"bytes,2,opt,name=value,proto3\"`\n}\n\nfunc (x *RecommendationStateInfo) Reset() {\n\t*x = RecommendationStateInfo{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[7]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *RecommendationStateInfo) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*RecommendationStateInfo) ProtoMessage() {}\n\nfunc (x *RecommendationStateInfo) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[7]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use RecommendationStateInfo.ProtoReflect.Descriptor instead.\nfunc (*RecommendationStateInfo) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{7}\n}\n\nfunc (x *RecommendationStateInfo) GetState() RecommendationStateInfo_State {\n\tif x != nil {\n\t\treturn x.State\n\t}\n\treturn RecommendationStateInfo_STATE_UNSPECIFIED\n}\n\nfunc (x *RecommendationStateInfo) GetStateMetadata() map[string]string {\n\tif x != nil {\n\t\treturn x.StateMetadata\n\t}\n\treturn nil\n}\n\n\/\/ Reference to an associated insight.\ntype Recommendation_InsightReference struct {\n\tstate protoimpl.MessageState\n\tsizeCache protoimpl.SizeCache\n\tunknownFields protoimpl.UnknownFields\n\n\t\/\/ Insight resource name, e.g.\n\t\/\/ projects\/[PROJECT_NUMBER]\/locations\/[LOCATION]\/insightTypes\/[INSIGHT_TYPE_ID]\/insights\/[INSIGHT_ID]\n\tInsight string `protobuf:\"bytes,1,opt,name=insight,proto3\" json:\"insight,omitempty\"`\n}\n\nfunc (x *Recommendation_InsightReference) Reset() {\n\t*x = Recommendation_InsightReference{}\n\tif protoimpl.UnsafeEnabled {\n\t\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[8]\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tms.StoreMessageInfo(mi)\n\t}\n}\n\nfunc (x *Recommendation_InsightReference) String() string {\n\treturn protoimpl.X.MessageStringOf(x)\n}\n\nfunc (*Recommendation_InsightReference) ProtoMessage() {}\n\nfunc (x 
*Recommendation_InsightReference) ProtoReflect() protoreflect.Message {\n\tmi := &file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[8]\n\tif protoimpl.UnsafeEnabled && x != nil {\n\t\tms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))\n\t\tif ms.LoadMessageInfo() == nil {\n\t\t\tms.StoreMessageInfo(mi)\n\t\t}\n\t\treturn ms\n\t}\n\treturn mi.MessageOf(x)\n}\n\n\/\/ Deprecated: Use Recommendation_InsightReference.ProtoReflect.Descriptor instead.\nfunc (*Recommendation_InsightReference) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP(), []int{0, 0}\n}\n\nfunc (x *Recommendation_InsightReference) GetInsight() string {\n\tif x != nil {\n\t\treturn x.Insight\n\t}\n\treturn \"\"\n}\n\nvar File_google_cloud_recommender_v1beta1_recommendation_proto protoreflect.FileDescriptor\n\nvar file_google_cloud_recommender_v1beta1_recommendation_proto_rawDesc = []byte{\n\t0x0a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2f, 0x72,\n\t0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,\n\t0x61, 0x31, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f,\n\t0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,\n\t0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65,\n\t0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c,\n\t0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70,\n\t0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f,\n\t0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70,\n\t0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f,\n\t0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f,\n\t0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f,\n\t0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72,\n\t0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x74, 0x79, 0x70, 0x65,\n\t0x2f, 0x6d, 0x6f, 0x6e, 0x65, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xa7, 0x09, 0x0a,\n\t0x0e, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12,\n\t0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e,\n\t0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69,\n\t0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69,\n\t0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65,\n\t0x6e, 0x64, 0x65, 0x72, 0x5f, 0x73, 0x75, 0x62, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0c, 0x20, 0x01,\n\t0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x53,\n\t0x75, 0x62, 0x74, 0x79, 0x70, 0x65, 0x12, 0x46, 0x0a, 0x11, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x72,\n\t0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28,\n\t0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,\n\t0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x6c,\n\t0x61, 0x73, 0x74, 0x52, 
0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x4f,\n\t0x0a, 0x0e, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x69, 0x6d, 0x70, 0x61, 0x63, 0x74,\n\t0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,\n\t0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65,\n\t0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x61, 0x63, 0x74,\n\t0x52, 0x0d, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x49, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x12,\n\t0x55, 0x0a, 0x11, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x6d,\n\t0x70, 0x61, 0x63, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, 0x6f,\n\t0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d,\n\t0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x49, 0x6d,\n\t0x70, 0x61, 0x63, 0x74, 0x52, 0x10, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c,\n\t0x49, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x12, 0x51, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e,\n\t0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,\n\t0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d,\n\t0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74,\n\t0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x58, 0x0a, 0x0a, 0x73, 0x74, 0x61,\n\t0x74, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e,\n\t0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63,\n\t0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,\n\t0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53,\n\t0x74, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x49,\n\t0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x65, 0x74, 0x61, 0x67, 0x18, 0x0b, 0x20, 0x01, 0x28,\n\t0x09, 0x52, 0x04, 0x65, 0x74, 0x61, 0x67, 0x12, 0x72, 0x0a, 0x13, 0x61, 0x73, 0x73, 0x6f, 0x63,\n\t0x69, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x0e,\n\t0x20, 0x03, 0x28, 0x0b, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c,\n\t0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e,\n\t0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e,\n\t0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x49, 0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65,\n\t0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x12, 0x61, 0x73, 0x73, 0x6f, 0x63, 0x69, 0x61,\n\t0x74, 0x65, 0x64, 0x49, 0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x73, 0x1a, 0x2c, 0x0a, 0x10, 0x49,\n\t0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12,\n\t0x18, 0x0a, 0x07, 0x69, 0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,\n\t0x52, 0x07, 0x69, 0x6e, 0x73, 0x69, 0x67, 0x68, 0x74, 0x3a, 0xda, 0x03, 0xea, 0x41, 0xd6, 0x03,\n\t0x0a, 0x29, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x67, 0x6f,\n\t0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x52, 0x65, 0x63,\n\t0x6f, 0x6d, 
0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x63, 0x70, 0x72, 0x6f,\n\t0x6a, 0x65, 0x63, 0x74, 0x73, 0x2f, 0x7b, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x7d, 0x2f,\n\t0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74,\n\t0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72,\n\t0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d, 0x2f,\n\t0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f,\n\t0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d,\n\t0x12, 0x72, 0x62, 0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74,\n\t0x73, 0x2f, 0x7b, 0x62, 0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x63, 0x63, 0x6f, 0x75,\n\t0x6e, 0x74, 0x7d, 0x2f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c,\n\t0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65,\n\t0x6e, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x65, 0x72, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69,\n\t0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74,\n\t0x69, 0x6f, 0x6e, 0x7d, 0x12, 0x61, 0x66, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x66,\n\t0x6f, 0x6c, 0x64, 0x65, 0x72, 0x7d, 0x2f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73,\n\t0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f,\n\t0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d,\n\t0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e,\n\t0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x12, 0x6d, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a,\n\t0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61,\n\t0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f,\n\t0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d,\n\t0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65,\n\t0x6e, 0x64, 0x65, 0x72, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61,\n\t0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x22, 0x74, 0x0a, 0x15, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d,\n\t0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12,\n\t0x5b, 0x0a, 0x10, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x67, 0x72, 0x6f,\n\t0x75, 0x70, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x6f, 0x6f, 0x67,\n\t0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65,\n\t0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4f, 0x70, 0x65,\n\t0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x52, 0x0f, 0x6f, 0x70, 0x65,\n\t0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x22, 0x5d, 0x0a, 0x0e,\n\t0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 
0x4b,\n\t0x0a, 0x0a, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03,\n\t0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75,\n\t0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31,\n\t0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52,\n\t0x0a, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xfa, 0x05, 0x0a, 0x09,\n\t0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74,\n\t0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f,\n\t0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x79,\n\t0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,\n\t0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,\n\t0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,\n\t0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09,\n\t0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,\n\t0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52,\n\t0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12,\n\t0x1f, 0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x06,\n\t0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x74, 0x68,\n\t0x12, 0x2e, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32,\n\t0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,\n\t0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,\n\t0x12, 0x55, 0x0a, 0x0d, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65,\n\t0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,\n\t0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65,\n\t0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x76, 0x61, 0x6c, 0x75, 0x65,\n\t0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x5f, 0x0a, 0x0c, 0x70, 0x61, 0x74, 0x68, 0x5f,\n\t0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e,\n\t0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63,\n\t0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,\n\t0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x46,\n\t0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x70, 0x61, 0x74,\n\t0x68, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x12, 0x72, 0x0a, 0x13, 0x70, 0x61, 0x74, 0x68,\n\t0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x73, 0x18,\n\t0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63,\n\t0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72,\n\t0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4f, 0x70, 0x65, 0x72, 
0x61, 0x74, 0x69,\n\t0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4d, 0x61, 0x74, 0x63,\n\t0x68, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x70, 0x61, 0x74, 0x68, 0x56,\n\t0x61, 0x6c, 0x75, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x73, 0x1a, 0x56, 0x0a, 0x10,\n\t0x50, 0x61, 0x74, 0x68, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79,\n\t0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,\n\t0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,\n\t0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,\n\t0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,\n\t0x3a, 0x02, 0x38, 0x01, 0x1a, 0x74, 0x0a, 0x16, 0x50, 0x61, 0x74, 0x68, 0x56, 0x61, 0x6c, 0x75,\n\t0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10,\n\t0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79,\n\t0x12, 0x44, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,\n\t0x2e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72,\n\t0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74,\n\t0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x52,\n\t0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x70, 0x61,\n\t0x74, 0x68, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4a, 0x0a, 0x0c, 0x56, 0x61, 0x6c, 0x75,\n\t0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x0f, 0x6d, 0x61, 0x74, 0x63,\n\t0x68, 0x65, 0x73, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28,\n\t0x09, 0x48, 0x00, 0x52, 0x0e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x50, 0x61, 0x74, 0x74,\n\t0x65, 0x72, 0x6e, 0x42, 0x0f, 0x0a, 0x0d, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x76, 0x61, 0x72,\n\t0x69, 0x61, 0x6e, 0x74, 0x22, 0x6f, 0x0a, 0x0e, 0x43, 0x6f, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a,\n\t0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x01,\n\t0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x74, 0x79,\n\t0x70, 0x65, 0x2e, 0x4d, 0x6f, 0x6e, 0x65, 0x79, 0x52, 0x04, 0x63, 0x6f, 0x73, 0x74, 0x12, 0x35,\n\t0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,\n\t0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,\n\t0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72,\n\t0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xa4, 0x02, 0x0a, 0x06, 0x49, 0x6d, 0x70, 0x61, 0x63, 0x74,\n\t0x12, 0x4d, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01,\n\t0x28, 0x0e, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75,\n\t0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31,\n\t0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x2e, 0x43, 0x61, 0x74,\n\t0x65, 0x67, 0x6f, 0x72, 0x79, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12,\n\t0x5b, 0x0a, 0x0f, 0x63, 0x6f, 0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x69,\n\t0x6f, 0x6e, 0x18, 0x64, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 
0x67, 0x6f, 0x6f, 0x67, 0x6c,\n\t0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e,\n\t0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x43, 0x6f, 0x73, 0x74,\n\t0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x6f,\n\t0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x60, 0x0a, 0x08,\n\t0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x18, 0x0a, 0x14, 0x43, 0x41, 0x54, 0x45,\n\t0x47, 0x4f, 0x52, 0x59, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44,\n\t0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x43, 0x4f, 0x53, 0x54, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08,\n\t0x53, 0x45, 0x43, 0x55, 0x52, 0x49, 0x54, 0x59, 0x10, 0x02, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x45,\n\t0x52, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x4e, 0x43, 0x45, 0x10, 0x03, 0x12, 0x11, 0x0a, 0x0d, 0x4d,\n\t0x41, 0x4e, 0x41, 0x47, 0x45, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x10, 0x04, 0x42, 0x0c,\n\t0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x8a, 0x03, 0x0a,\n\t0x17, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53,\n\t0x74, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x55, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74,\n\t0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,\n\t0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d,\n\t0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x6e,\n\t0x66, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12,\n\t0x73, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,\n\t0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,\n\t0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64,\n\t0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d,\n\t0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x6e,\n\t0x66, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,\n\t0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61,\n\t0x64, 0x61, 0x74, 0x61, 0x1a, 0x40, 0x0a, 0x12, 0x53, 0x74, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74,\n\t0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65,\n\t0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05,\n\t0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c,\n\t0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x61, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12,\n\t0x15, 0x0a, 0x11, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49,\n\t0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x41, 0x43, 0x54, 0x49, 0x56, 0x45,\n\t0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x45, 0x44, 0x10, 0x06, 0x12,\n\t0x0d, 0x0a, 0x09, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0a,\n\t0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x44, 0x49,\n\t0x53, 0x4d, 0x49, 0x53, 0x53, 0x45, 0x44, 0x10, 0x05, 
0x42, 0xf2, 0x03, 0x0a, 0x24, 0x63, 0x6f,\n\t0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x72,\n\t0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74,\n\t0x61, 0x31, 0x50, 0x01, 0x5a, 0x4b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c,\n\t0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f,\n\t0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x63, 0x6c, 0x6f, 0x75,\n\t0x64, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x31,\n\t0x62, 0x65, 0x74, 0x61, 0x31, 0x3b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65,\n\t0x72, 0xa2, 0x02, 0x04, 0x43, 0x52, 0x45, 0x43, 0xaa, 0x02, 0x20, 0x47, 0x6f, 0x6f, 0x67, 0x6c,\n\t0x65, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e,\n\t0x64, 0x65, 0x72, 0x2e, 0x56, 0x31, 0x42, 0x65, 0x74, 0x61, 0x31, 0xea, 0x41, 0xcf, 0x02, 0x0a,\n\t0x26, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2e, 0x67, 0x6f, 0x6f,\n\t0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x52, 0x65, 0x63, 0x6f,\n\t0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x12, 0x42, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,\n\t0x73, 0x2f, 0x7b, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x7d, 0x2f, 0x6c, 0x6f, 0x63, 0x61,\n\t0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d,\n\t0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x72,\n\t0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d, 0x12, 0x51, 0x62, 0x69, 0x6c,\n\t0x6c, 0x69, 0x6e, 0x67, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x62, 0x69,\n\t0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x7d, 0x2f, 0x6c,\n\t0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69,\n\t0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73,\n\t0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d, 0x12, 0x40,\n\t0x66, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x66, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x7d,\n\t0x2f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61,\n\t0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65,\n\t0x72, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d,\n\t0x12, 0x4c, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f,\n\t0x7b, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x6c,\n\t0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69,\n\t0x6f, 0x6e, 0x7d, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73,\n\t0x2f, 0x7b, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x7d, 0x62, 0x06,\n\t0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,\n}\n\nvar (\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_rawDescOnce sync.Once\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_rawDescData = file_google_cloud_recommender_v1beta1_recommendation_proto_rawDesc\n)\n\nfunc file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescGZIP() []byte 
{\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_rawDescOnce.Do(func() {\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescData)\n\t})\n\treturn file_google_cloud_recommender_v1beta1_recommendation_proto_rawDescData\n}\n\nvar file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes = make([]protoimpl.EnumInfo, 2)\nvar file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes = make([]protoimpl.MessageInfo, 12)\nvar file_google_cloud_recommender_v1beta1_recommendation_proto_goTypes = []interface{}{\n\t(Impact_Category)(0), \/\/ 0: google.cloud.recommender.v1beta1.Impact.Category\n\t(RecommendationStateInfo_State)(0), \/\/ 1: google.cloud.recommender.v1beta1.RecommendationStateInfo.State\n\t(*Recommendation)(nil), \/\/ 2: google.cloud.recommender.v1beta1.Recommendation\n\t(*RecommendationContent)(nil), \/\/ 3: google.cloud.recommender.v1beta1.RecommendationContent\n\t(*OperationGroup)(nil), \/\/ 4: google.cloud.recommender.v1beta1.OperationGroup\n\t(*Operation)(nil), \/\/ 5: google.cloud.recommender.v1beta1.Operation\n\t(*ValueMatcher)(nil), \/\/ 6: google.cloud.recommender.v1beta1.ValueMatcher\n\t(*CostProjection)(nil), \/\/ 7: google.cloud.recommender.v1beta1.CostProjection\n\t(*Impact)(nil), \/\/ 8: google.cloud.recommender.v1beta1.Impact\n\t(*RecommendationStateInfo)(nil), \/\/ 9: google.cloud.recommender.v1beta1.RecommendationStateInfo\n\t(*Recommendation_InsightReference)(nil), \/\/ 10: google.cloud.recommender.v1beta1.Recommendation.InsightReference\n\tnil, \/\/ 11: google.cloud.recommender.v1beta1.Operation.PathFiltersEntry\n\tnil, \/\/ 12: google.cloud.recommender.v1beta1.Operation.PathValueMatchersEntry\n\tnil, \/\/ 13: google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry\n\t(*timestamppb.Timestamp)(nil), \/\/ 14: google.protobuf.Timestamp\n\t(*structpb.Value)(nil), \/\/ 15: google.protobuf.Value\n\t(*money.Money)(nil), \/\/ 16: google.type.Money\n\t(*durationpb.Duration)(nil), \/\/ 17: google.protobuf.Duration\n}\nvar file_google_cloud_recommender_v1beta1_recommendation_proto_depIdxs = []int32{\n\t14, \/\/ 0: google.cloud.recommender.v1beta1.Recommendation.last_refresh_time:type_name -> google.protobuf.Timestamp\n\t8, \/\/ 1: google.cloud.recommender.v1beta1.Recommendation.primary_impact:type_name -> google.cloud.recommender.v1beta1.Impact\n\t8, \/\/ 2: google.cloud.recommender.v1beta1.Recommendation.additional_impact:type_name -> google.cloud.recommender.v1beta1.Impact\n\t3, \/\/ 3: google.cloud.recommender.v1beta1.Recommendation.content:type_name -> google.cloud.recommender.v1beta1.RecommendationContent\n\t9, \/\/ 4: google.cloud.recommender.v1beta1.Recommendation.state_info:type_name -> google.cloud.recommender.v1beta1.RecommendationStateInfo\n\t10, \/\/ 5: google.cloud.recommender.v1beta1.Recommendation.associated_insights:type_name -> google.cloud.recommender.v1beta1.Recommendation.InsightReference\n\t4, \/\/ 6: google.cloud.recommender.v1beta1.RecommendationContent.operation_groups:type_name -> google.cloud.recommender.v1beta1.OperationGroup\n\t5, \/\/ 7: google.cloud.recommender.v1beta1.OperationGroup.operations:type_name -> google.cloud.recommender.v1beta1.Operation\n\t15, \/\/ 8: google.cloud.recommender.v1beta1.Operation.value:type_name -> google.protobuf.Value\n\t6, \/\/ 9: google.cloud.recommender.v1beta1.Operation.value_matcher:type_name -> google.cloud.recommender.v1beta1.ValueMatcher\n\t11, 
\/\/ 10: google.cloud.recommender.v1beta1.Operation.path_filters:type_name -> google.cloud.recommender.v1beta1.Operation.PathFiltersEntry\n\t12, \/\/ 11: google.cloud.recommender.v1beta1.Operation.path_value_matchers:type_name -> google.cloud.recommender.v1beta1.Operation.PathValueMatchersEntry\n\t16, \/\/ 12: google.cloud.recommender.v1beta1.CostProjection.cost:type_name -> google.type.Money\n\t17, \/\/ 13: google.cloud.recommender.v1beta1.CostProjection.duration:type_name -> google.protobuf.Duration\n\t0, \/\/ 14: google.cloud.recommender.v1beta1.Impact.category:type_name -> google.cloud.recommender.v1beta1.Impact.Category\n\t7, \/\/ 15: google.cloud.recommender.v1beta1.Impact.cost_projection:type_name -> google.cloud.recommender.v1beta1.CostProjection\n\t1, \/\/ 16: google.cloud.recommender.v1beta1.RecommendationStateInfo.state:type_name -> google.cloud.recommender.v1beta1.RecommendationStateInfo.State\n\t13, \/\/ 17: google.cloud.recommender.v1beta1.RecommendationStateInfo.state_metadata:type_name -> google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry\n\t15, \/\/ 18: google.cloud.recommender.v1beta1.Operation.PathFiltersEntry.value:type_name -> google.protobuf.Value\n\t6, \/\/ 19: google.cloud.recommender.v1beta1.Operation.PathValueMatchersEntry.value:type_name -> google.cloud.recommender.v1beta1.ValueMatcher\n\t20, \/\/ [20:20] is the sub-list for method output_type\n\t20, \/\/ [20:20] is the sub-list for method input_type\n\t20, \/\/ [20:20] is the sub-list for extension type_name\n\t20, \/\/ [20:20] is the sub-list for extension extendee\n\t0, \/\/ [0:20] is the sub-list for field type_name\n}\n\nfunc init() { file_google_cloud_recommender_v1beta1_recommendation_proto_init() }\nfunc file_google_cloud_recommender_v1beta1_recommendation_proto_init() {\n\tif File_google_cloud_recommender_v1beta1_recommendation_proto != nil {\n\t\treturn\n\t}\n\tif !protoimpl.UnsafeEnabled {\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*Recommendation); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*RecommendationContent); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*OperationGroup); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*Operation); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*ValueMatcher); i {\n\t\t\tcase 
0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*CostProjection); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*Impact); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*RecommendationStateInfo); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {\n\t\t\tswitch v := v.(*Recommendation_InsightReference); i {\n\t\t\tcase 0:\n\t\t\t\treturn &v.state\n\t\t\tcase 1:\n\t\t\t\treturn &v.sizeCache\n\t\t\tcase 2:\n\t\t\t\treturn &v.unknownFields\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t}\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[3].OneofWrappers = []interface{}{\n\t\t(*Operation_Value)(nil),\n\t\t(*Operation_ValueMatcher)(nil),\n\t}\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[4].OneofWrappers = []interface{}{\n\t\t(*ValueMatcher_MatchesPattern)(nil),\n\t}\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes[6].OneofWrappers = []interface{}{\n\t\t(*Impact_CostProjection)(nil),\n\t}\n\ttype x struct{}\n\tout := protoimpl.TypeBuilder{\n\t\tFile: protoimpl.DescBuilder{\n\t\t\tGoPackagePath: reflect.TypeOf(x{}).PkgPath(),\n\t\t\tRawDescriptor: file_google_cloud_recommender_v1beta1_recommendation_proto_rawDesc,\n\t\t\tNumEnums: 2,\n\t\t\tNumMessages: 12,\n\t\t\tNumExtensions: 0,\n\t\t\tNumServices: 0,\n\t\t},\n\t\tGoTypes: file_google_cloud_recommender_v1beta1_recommendation_proto_goTypes,\n\t\tDependencyIndexes: file_google_cloud_recommender_v1beta1_recommendation_proto_depIdxs,\n\t\tEnumInfos: file_google_cloud_recommender_v1beta1_recommendation_proto_enumTypes,\n\t\tMessageInfos: file_google_cloud_recommender_v1beta1_recommendation_proto_msgTypes,\n\t}.Build()\n\tFile_google_cloud_recommender_v1beta1_recommendation_proto = out.File\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_rawDesc = nil\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_goTypes = nil\n\tfile_google_cloud_recommender_v1beta1_recommendation_proto_depIdxs = nil\n}\n","avg_line_length":46.0007142857,"max_line_length":249,"alphanum_fraction":0.722286921} +{"size":1156,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/ @Description: \u96ea\u82b1\u7b97\u6cd5\n\/\/ @Author: wucongxing\n\/\/ @Date:2022\/3\/8 15:24\n\npackage Tool\n\nimport (\n\t\"errors\"\n\t\"sync\"\n\t\"time\"\n)\n\nconst (\n\tworkerBits uint8 = 10\n\tnumberBits uint8 = 12\n\tworkerMax int64 = -1 ^ (-1 << workerBits)\n\tnumberMax int64 = -1 ^ (-1 
<< numberBits)\n\ttimeShift uint8 = workerBits + numberBits\n\tworkerShift uint8 = numberBits\n\tstartTime int64 = 1525705533000 \/\/ \u5982\u679c\u5728\u7a0b\u5e8f\u8dd1\u4e86\u4e00\u6bb5\u65f6\u95f4\u4fee\u6539\u4e86epoch\u8fd9\u4e2a\u503c \u53ef\u80fd\u4f1a\u5bfc\u81f4\u751f\u6210\u76f8\u540c\u7684ID\n)\n\ntype Worker struct {\n\tmu sync.Mutex\n\ttimestamp int64\n\tworkerId int64\n\tnumber int64\n}\n\nfunc NewWorker(workerId int64) (*Worker, error) {\n\tif workerId < 0 || workerId > workerMax {\n\t\treturn nil, errors.New(\"Worker ID excess of quantity\")\n\t}\n\t\/\/ \u751f\u6210\u4e00\u4e2a\u65b0\u8282\u70b9\n\treturn &Worker{\n\t\ttimestamp: 0,\n\t\tworkerId: workerId,\n\t\tnumber: 0,\n\t}, nil\n}\n\nfunc (w *Worker) GetId() int64 {\n\tw.mu.Lock()\n\tdefer w.mu.Unlock()\n\tnow := time.Now().UnixNano() \/ 1e6\n\tif w.timestamp == now {\n\t\tw.number++\n\t\tif w.number > numberMax {\n\t\t\tfor now <= w.timestamp {\n\t\t\t\tnow = time.Now().UnixNano() \/ 1e6\n\t\t\t}\n\t\t}\n\t} else {\n\t\tw.number = 0\n\t\tw.timestamp = now\n\t}\n\tID := int64((now-startTime)< 0 {\n\t\tmultipartBoundarySize = t.MultipartBoundarySize\n\t} else {\n\t\tmultipartBoundarySize = constants.DefaultMultipartBoundarySize\n\t}\n\n\terr = check(ctx)\n\tif err != nil {\n\t\tlogrus.Errorf(\"Pre migrate check failed for %v.\", err)\n\t\treturn\n\t}\n\n\treturn run(ctx)\n}\n\nfunc check(ctx context.Context) (err error) {\n\t\/\/ Initialize source.\n\tswitch t.Src.Type {\n\tcase constants.EndpointAliyun:\n\t\tsrc, err = aliyun.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointAzblob:\n\t\tsrc, err = azblob.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointFileList:\n\t\tsrc, err = filelist.New(ctx, constants.SourceEndpoint)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointFs:\n\t\tsrc, err = fs.New(ctx, constants.SourceEndpoint)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointGCS:\n\t\tsrc, err = gcs.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointHDFS:\n\t\tsrc, err = hdfs.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointQingStor:\n\t\tsrc, err = qingstor.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointQiniu:\n\t\tsrc, err = qiniu.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointS3:\n\t\tsrc, err = s3.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointUpyun:\n\t\tsrc, err = upyun.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointCOS:\n\t\tsrc, err = cos.New(ctx, constants.SourceEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tdefault:\n\t\tlogrus.Errorf(\"Type %s is not supported.\", t.Src.Type)\n\t\terr = constants.ErrEndpointNotSupported\n\t\treturn\n\t}\n\n\t\/\/ Initialize destination.\n\tswitch t.Dst.Type {\n\tcase constants.EndpointQingStor:\n\t\tdst, err = qingstor.New(ctx, constants.DestinationEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointFs:\n\t\tdst, err = fs.New(ctx, constants.DestinationEndpoint)\n\t\tif err != nil 
{\n\t\t\treturn\n\t\t}\n\tcase constants.EndpointS3:\n\t\tdst, err = s3.New(ctx, constants.DestinationEndpoint, contexts.Client)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tdefault:\n\t\tlogrus.Errorf(\"Type %s is not supported.\", t.Src.Type)\n\t\terr = constants.ErrEndpointNotSupported\n\t\treturn\n\t}\n\n\treturn\n}\n\n\/\/ run will execute task.\nfunc run(ctx context.Context) (err error) {\n\t\/\/ Check if task has been finished.\n\tif t.Status == constants.TaskStatusFinished {\n\t\tlogrus.Infof(\"Task %s has been finished, skip.\", t.Name)\n\t\treturn\n\t}\n\n\tswitch t.Type {\n\tcase constants.TaskTypeCopy:\n\t\tt.Handle = copyObject\n\t\terr = copyTask(ctx)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.TaskTypeDelete:\n\t\tt.Handle = deleteObject\n\t\terr = deleteTask(ctx)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tcase constants.TaskTypeFetch:\n\t\tt.Handle = fetchObject\n\t\terr = fetchTask(ctx)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\tdefault:\n\t\tlogrus.Errorf(\"Task %s's type %s is not supported.\", t.Name, t.Type)\n\t\treturn\n\t}\n\n\t\/\/ Update task status.\n\tt.Status = constants.TaskStatusFinished\n\terr = t.Save(ctx)\n\tif err != nil {\n\t\tlogrus.Errorf(\"Task %s save failed for %v.\", t.Name, err)\n\t\treturn\n\t}\n\n\tlogrus.Infof(\"Task %s has been finished.\", t.Name)\n\treturn\n}\n\n\/\/ migrateWorker will only do migrate work.\nfunc migrateWorker(ctx context.Context) {\n\tdefer owg.Done()\n\tdefer utils.Recover()\n\n\tfor o := range oc {\n\t\tok, err := checkObject(ctx, o)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Check object failed for %v.\", err)\n\t\t\tcontinue\n\t\t}\n\t\tif ok {\n\t\t\terr = model.DeleteObject(ctx, o)\n\t\t\tif err != nil {\n\t\t\t\tutils.CheckClosedDB(err)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\t\/\/ Object may be tried in three times.\n\t\tbo := backoff.NewExponentialBackOff()\n\t\tbo.Multiplier = 2.0\n\t\tbo.MaxElapsedTime = 2 * time.Second\n\n\t\terr = backoff.Retry(func() error {\n\t\t\terr = t.Handle(ctx, o)\n\t\t\tif err == nil {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\tlogrus.Infof(\"%s object failed for %v, retried.\", t.Type, err)\n\t\t\treturn err\n\t\t}, bo)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"%s object failed for %v.\", t.Type, err)\n\t\t\tcontinue\n\t\t}\n\n\t\terr = model.DeleteObject(ctx, o)\n\t\tif err != nil {\n\t\t\tutils.CheckClosedDB(err)\n\t\t\tcontinue\n\t\t}\n\t}\n}\n\n\/\/ isFinished will check whether current task has been finished.\nfunc isFinished(ctx context.Context) bool {\n\th, err := model.HasDirectoryObject(ctx)\n\tif err != nil {\n\t\tlogrus.Panic(err)\n\t}\n\tif h {\n\t\tlogrus.Infof(\"There are not finished directory objects.\")\n\t\treturn false\n\t}\n\n\th, err = model.HasSingleObject(ctx)\n\tif err != nil {\n\t\tlogrus.Panic(err)\n\t}\n\tif h {\n\t\tlogrus.Infof(\"There are not finished single objects.\")\n\t\treturn false\n\t}\n\n\th, err = model.HasPartialObject(ctx)\n\tif err != nil {\n\t\tlogrus.Panic(err)\n\t}\n\tif h {\n\t\tlogrus.Infof(\"There are not finished partial objects.\")\n\t\treturn false\n\t}\n\n\treturn true\n}\n","avg_line_length":23.8983050847,"max_line_length":78,"alphanum_fraction":0.6737588652} +{"size":2204,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package metadata\n\nimport (\n\t\"fmt\"\n\n\tpaginator \"github.com\/pilagod\/gorm-cursor-paginator\"\n\n\t\"github.com\/jinzhu\/gorm\"\n\n\t\"github.com\/pasientskyhosting\/plik\/server\/common\"\n)\n\n\/\/ CreateToken create a new token in DB\nfunc (b *Backend) CreateToken(token 
*common.Token) (err error) {\n\treturn b.db.Create(token).Error\n}\n\n\/\/ GetToken return a token from the DB ( return nil and non error if not found )\nfunc (b *Backend) GetToken(tokenStr string) (token *common.Token, err error) {\n\ttoken = &common.Token{}\n\terr = b.db.Where(&common.Token{Token: tokenStr}).Take(token).Error\n\tif gorm.IsRecordNotFoundError(err) {\n\t\treturn nil, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn token, err\n}\n\n\/\/ GetTokens return all tokens for a user\nfunc (b *Backend) GetTokens(userID string, pagingQuery *common.PagingQuery) (tokens []*common.Token, cursor *paginator.Cursor, err error) {\n\tstmt := b.db.Model(&common.Token{}).Where(&common.Token{UserID: userID})\n\n\tp := pagingQuery.Paginator()\n\tp.SetKeys(\"CreatedAt\", \"Token\")\n\n\terr = p.Paginate(stmt, &tokens).Error\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tc := p.GetNextCursor()\n\treturn tokens, &c, err\n}\n\n\/\/ DeleteToken remove a token from the DB\nfunc (b *Backend) DeleteToken(tokenStr string) (deleted bool, err error) {\n\n\t\/\/ Delete token\n\tresult := b.db.Delete(&common.Token{Token: tokenStr})\n\tif result.Error != nil {\n\t\treturn false, fmt.Errorf(\"unable to delete token metadata\")\n\t}\n\n\treturn result.RowsAffected > 0, err\n}\n\n\/\/ CountUserTokens count how many token a user has\nfunc (b *Backend) CountUserTokens(userID string) (count int, err error) {\n\terr = b.db.Model(&common.Token{}).Where(&common.Token{UserID: userID}).Count(&count).Error\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\treturn count, nil\n}\n\n\/\/ ForEachToken execute f for every token in the database\nfunc (b *Backend) ForEachToken(f func(token *common.Token) error) (err error) {\n\tstmt := b.db.Model(&common.Token{})\n\n\trows, err := stmt.Rows()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() { _ = rows.Close() }()\n\n\tfor rows.Next() {\n\t\ttoken := &common.Token{}\n\t\terr = b.db.ScanRows(rows, token)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = f(token)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}\n","avg_line_length":23.6989247312,"max_line_length":139,"alphanum_fraction":0.6842105263} +{"size":13669,"ext":"go","lang":"Go","max_stars_count":null,"content":"\/\/\n\/\/ The approach for providing VM policy-based integration has the\n\/\/ following design constraints:\n\/\/ - Validation must never block updating the data model.\n\/\/ - Real-time validation is best effort.\n\/\/ - A scheduled search for VMs that needs to be validated\n\/\/ ensures that all VMs eventually get validated.\n\/\/ Real-time validation is triggered by VM create\/update model events.\n\/\/ If the validation service is unavailable or fails, the condition\n\/\/ is only logged with the intent that the next scheduled search will\n\/\/ validate the latest version of VM.\n\/\/ The scheduled search is a goroutine that periodically queries the\n\/\/ DB for VMs with: revision != revisionValidated. Each matched VM\n\/\/ is validated. To reduce overlap between the scheduled validation\n\/\/ and event-driven validation, Each model event is \"reported\" (though\n\/\/ a channel) to the search (loop). 
Reported are omitted from the search result.\n\/\/ Both Cluster and Host model events result in all of the VMs in their respective\n\/\/ containment trees will be updated with: revisionValidated = 0 which triggers\n\/\/ (re)validation.\n\/\/\npackage ovirt\n\nimport (\n\t\"context\"\n\t\"github.com\/go-logr\/logr\"\n\tliberr \"github.com\/konveyor\/controller\/pkg\/error\"\n\tlibmodel \"github.com\/konveyor\/controller\/pkg\/inventory\/model\"\n\tapi \"github.com\/konveyor\/forklift-controller\/pkg\/apis\/forklift\/v1beta1\"\n\trefapi \"github.com\/konveyor\/forklift-controller\/pkg\/apis\/forklift\/v1beta1\/ref\"\n\tmodel \"github.com\/konveyor\/forklift-controller\/pkg\/controller\/provider\/model\/ovirt\"\n\tweb \"github.com\/konveyor\/forklift-controller\/pkg\/controller\/provider\/web\/ovirt\"\n\t\"github.com\/konveyor\/forklift-controller\/pkg\/controller\/validation\/policy\"\n\t\"github.com\/konveyor\/forklift-controller\/pkg\/settings\"\n\t\"time\"\n)\n\n\/\/\n\/\/ The (max) number of batched task results.\nconst (\n\tMaxBatch = 1024\n)\n\n\/\/\n\/\/ Endpoints.\nconst (\n\tBaseEndpoint = \"\/v1\/data\/io\/konveyor\/forklift\/ovirt\/\"\n\tVersionEndpoint = BaseEndpoint + \"rules_version\"\n\tValidationEndpoint = BaseEndpoint + \"validate\"\n)\n\n\/\/\n\/\/ Application settings.\nvar Settings = &settings.Settings\n\n\/\/\n\/\/ Reported model event.\ntype ReportedEvent struct {\n\t\/\/ VM id.\n\tid string\n\t\/\/ VM revision.\n\trevision int64\n}\n\n\/\/\n\/\/ Watch for VM changes and validate as needed.\ntype VMEventHandler struct {\n\tlibmodel.StockEventHandler\n\t\/\/ Provider.\n\tProvider *api.Provider\n\t\/\/ DB.\n\tDB libmodel.DB\n\t\/\/ Reported VM events.\n\tinput chan ReportedEvent\n\t\/\/ Reported VM IDs.\n\treported map[string]int64\n\t\/\/ Last search.\n\tlastSearch time.Time\n\t\/\/ Logger.\n\tlog logr.Logger\n\t\/\/ Context\n\tcontext context.Context\n\t\/\/ Context cancel.\n\tcancel context.CancelFunc\n\t\/\/ Task result\n\ttaskResult chan *policy.Task\n}\n\n\/\/\n\/\/ Reset.\nfunc (r *VMEventHandler) reset() {\n\tr.reported = map[string]int64{}\n\tr.lastSearch = time.Now()\n}\n\n\/\/\n\/\/ Watch ended.\nfunc (r *VMEventHandler) Started(uint64) {\n\tr.log.Info(\"Started.\")\n\tr.taskResult = make(chan *policy.Task)\n\tr.input = make(chan ReportedEvent)\n\tr.context, r.cancel = context.WithCancel(context.Background())\n\tgo r.run()\n\tgo r.harvest()\n}\n\n\/\/\n\/\/ VM Created.\n\/\/ The VM is scheduled (and reported as scheduled).\n\/\/ This is best-effort. If the validate() fails, it wil be\n\/\/ picked up in the next search().\nfunc (r *VMEventHandler) Created(event libmodel.Event) {\n\tif r.canceled() {\n\t\treturn\n\t}\n\tif vm, cast := event.Model.(*model.VM); cast {\n\t\tif !vm.Validated() {\n\t\t\tif r.validate(vm) == nil {\n\t\t\t\tr.report(vm)\n\t\t\t}\n\t\t}\n\t}\n}\n\n\/\/\n\/\/ VM Updated.\n\/\/ The VM is scheduled (and reported as scheduled).\n\/\/ This is best-effort. 
If the validate() fails, it wil be\n\/\/ picked up in the next search().\nfunc (r *VMEventHandler) Updated(event libmodel.Event) {\n\tif r.canceled() {\n\t\treturn\n\t}\n\tif vm, cast := event.Updated.(*model.VM); cast {\n\t\tif !vm.Validated() {\n\t\t\tif r.validate(vm) == nil {\n\t\t\t\tr.report(vm)\n\t\t\t}\n\t\t}\n\t}\n}\n\n\/\/\n\/\/ Report errors.\nfunc (r *VMEventHandler) Error(err error) {\n\tr.log.Error(liberr.Wrap(err), err.Error())\n}\n\n\/\/\n\/\/ Watch ended.\nfunc (r *VMEventHandler) End() {\n\tr.log.Info(\"Ended.\")\n\tr.cancel()\n\tclose(r.input)\n\tclose(r.taskResult)\n}\n\n\/\/\n\/\/ Report model event.\nfunc (r *VMEventHandler) report(vm *model.VM) {\n\tdefer func() {\n\t\t_ = recover()\n\t}()\n\tr.input <- ReportedEvent{\n\t\trevision: vm.Revision,\n\t\tid: vm.ID,\n\t}\n}\n\n\/\/\n\/\/ Run.\n\/\/ Periodically search for VMs that need to be validated.\nfunc (r *VMEventHandler) run() {\n\tr.log.Info(\"Run started.\")\n\tdefer r.log.Info(\"Run stopped.\")\n\tinterval := time.Second * time.Duration(\n\t\tSettings.PolicyAgent.SearchInterval)\n\tr.list()\n\tr.reset()\n\tfor {\n\t\tselect {\n\t\tcase <-time.After(interval):\n\t\tcase reportedEvent, open := <-r.input:\n\t\t\tif open {\n\t\t\t\tr.reported[reportedEvent.id] = reportedEvent.revision\n\t\t\t} else {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif time.Since(r.lastSearch) > interval {\n\t\t\tr.list()\n\t\t\tr.reset()\n\t\t}\n\t}\n}\n\n\/\/\n\/\/ Harvest validation task results and update VMs.\n\/\/ Collect completed tasks in batches. Apply the batch\n\/\/ to VMs when one of:\n\/\/ - The batch is full.\n\/\/ - No tasks have been received within\n\/\/ the delay period.\nfunc (r *VMEventHandler) harvest() {\n\tr.log.Info(\"Harvest started.\")\n\tdefer r.log.Info(\"Harvest stopped.\")\n\tlong := time.Hour\n\tshort := time.Second\n\tdelay := long\n\tbatch := []*policy.Task{}\n\tmark := time.Now()\n\tfor {\n\t\tselect {\n\t\tcase <-time.After(delay):\n\t\tcase task, open := <-r.taskResult:\n\t\t\tif open {\n\t\t\t\tbatch = append(batch, task)\n\t\t\t\tdelay = short\n\t\t\t} else {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif time.Since(mark) > delay || len(batch) > MaxBatch {\n\t\t\tr.validated(batch)\n\t\t\tbatch = []*policy.Task{}\n\t\t\tdelay = long\n\t\t\tmark = time.Now()\n\t\t}\n\t}\n}\n\n\/\/\n\/\/ List for VMs to be validated.\n\/\/ VMs that have been reported through the model event\n\/\/ watch are ignored.\nfunc (r *VMEventHandler) list() {\n\tr.log.V(3).Info(\"List VMs that need to be validated.\")\n\tversion, err := policy.Agent.Version(VersionEndpoint)\n\tif err != nil {\n\t\tr.log.Error(err, err.Error())\n\t\treturn\n\t}\n\tif r.canceled() {\n\t\treturn\n\t}\n\titr, err := r.DB.Find(\n\t\t&model.VM{},\n\t\tlibmodel.ListOptions{\n\t\t\tPredicate: libmodel.Or(\n\t\t\t\tlibmodel.Neq(\"Revision\", libmodel.Field{Name: \"RevisionValidated\"}),\n\t\t\t\tlibmodel.Neq(\"PolicyVersion\", version)),\n\t\t})\n\tif err != nil {\n\t\tr.log.Error(err, \"List VM failed.\")\n\t\treturn\n\t}\n\tif itr.Len() > 0 {\n\t\tr.log.V(3).Info(\n\t\t\t\"List (unvalidated) VMs found.\",\n\t\t\t\"count\",\n\t\t\titr.Len())\n\t}\n\tfor {\n\t\tvm := &model.VM{}\n\t\thasNext := itr.NextWith(vm)\n\t\tif !hasNext || r.canceled() {\n\t\t\tbreak\n\t\t}\n\t\tif revision, found := r.reported[vm.ID]; found {\n\t\t\tif vm.Revision == revision {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\t_ = r.validate(vm)\n\t}\n}\n\n\/\/\n\/\/ Handler canceled.\nfunc (r *VMEventHandler) canceled() bool {\n\tselect {\n\tcase <-r.context.Done():\n\t\treturn 
true\n\tdefault:\n\t\treturn false\n\t}\n}\n\n\/\/\n\/\/ Analyze the VM.\nfunc (r *VMEventHandler) validate(vm *model.VM) (err error) {\n\ttask := &policy.Task{\n\t\tPath: ValidationEndpoint,\n\t\tContext: r.context,\n\t\tWorkload: r.workload,\n\t\tResult: r.taskResult,\n\t\tRevision: vm.Revision,\n\t\tRef: refapi.Ref{\n\t\t\tID: vm.ID,\n\t\t},\n\t}\n\tr.log.V(4).Info(\n\t\t\"Validate VM.\",\n\t\t\"vmID\",\n\t\tvm.ID)\n\terr = policy.Agent.Submit(task)\n\tif err != nil {\n\t\tr.log.Error(err, \"VM task (submit) failed.\")\n\t}\n\n\treturn\n}\n\n\/\/\n\/\/ VMs validated.\nfunc (r *VMEventHandler) validated(batch []*policy.Task) {\n\tif len(batch) == 0 {\n\t\treturn\n\t}\n\tr.log.V(3).Info(\n\t\t\"VM (batch) completed.\",\n\t\t\"count\",\n\t\tlen(batch))\n\ttx, err := r.DB.Begin()\n\tif err != nil {\n\t\tr.log.Error(err, \"Begin tx failed.\")\n\t\treturn\n\t}\n\tdefer func() {\n\t\t_ = tx.End()\n\t}()\n\tfor _, task := range batch {\n\t\tif task.Error != nil {\n\t\t\tr.log.Error(\n\t\t\t\ttask.Error, \"VM validation failed.\")\n\t\t\tcontinue\n\t\t}\n\t\tlatest := &model.VM{Base: model.Base{ID: task.Ref.ID}}\n\t\terr = tx.Get(latest)\n\t\tif err != nil {\n\t\t\tr.log.Error(err, \"VM (get) failed.\")\n\t\t\tcontinue\n\t\t}\n\t\tif task.Revision != latest.Revision {\n\t\t\tcontinue\n\t\t}\n\t\tlatest.PolicyVersion = task.Version\n\t\tlatest.RevisionValidated = latest.Revision\n\t\tlatest.Concerns = task.Concerns\n\t\tlatest.Revision--\n\t\terr = tx.Update(latest)\n\t\tif err != nil {\n\t\t\tr.log.Error(err, \"VM update failed.\")\n\t\t\tcontinue\n\t\t}\n\t\tr.log.V(4).Info(\n\t\t\t\"VM validated.\",\n\t\t\t\"vmID\",\n\t\t\tlatest.ID,\n\t\t\t\"revision\",\n\t\t\tlatest.Revision,\n\t\t\t\"duration\",\n\t\t\ttask.Duration())\n\t}\n\terr = tx.Commit()\n\tif err != nil {\n\t\tr.log.Error(err, \"Tx commit failed.\")\n\t\treturn\n\t}\n}\n\n\/\/\n\/\/ Build the workload.\nfunc (r *VMEventHandler) workload(vmID string) (object interface{}, err error) {\n\tvm := &model.VM{\n\t\tBase: model.Base{ID: vmID},\n\t}\n\terr = r.DB.Get(vm)\n\tif err != nil {\n\t\treturn\n\t}\n\tworkload := web.Workload{}\n\tworkload.With(vm)\n\terr = workload.Expand(r.DB)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tworkload.Link(r.Provider)\n\tobject = workload\n\n\treturn\n}\n\n\/\/\n\/\/ Watch for cluster changes and validate as needed.\ntype ClusterEventHandler struct {\n\tlibmodel.StockEventHandler\n\t\/\/ DB.\n\tDB libmodel.DB\n\t\/\/ Logger.\n\tlog logr.Logger\n}\n\n\/\/\n\/\/ Cluster updated.\n\/\/ Analyze all related VMs.\nfunc (r *ClusterEventHandler) Updated(event libmodel.Event) {\n\tcluster, cast := event.Model.(*model.Cluster)\n\tif cast {\n\t\tr.validate(cluster)\n\t}\n}\n\n\/\/\n\/\/ Report errors.\nfunc (r *ClusterEventHandler) Error(err error) {\n\tr.log.Error(liberr.Wrap(err), err.Error())\n}\n\n\/\/\n\/\/ Analyze all of the VMs related to the cluster.\nfunc (r *ClusterEventHandler) validate(cluster *model.Cluster) {\n\tlist := []model.Host{}\n\terr := r.DB.List(\n\t\t&list,\n\t\tmodel.ListOptions{\n\t\t\tPredicate: libmodel.Eq(\"cluster\", cluster.ID),\n\t\t})\n\tif err != nil {\n\t\tr.log.Error(err, \"list Host failed.\")\n\t\treturn\n\t}\n\tfor _, host := range list {\n\t\thostHandler := HostEventHandler{DB: r.DB}\n\t\thostHandler.validate(&host)\n\t}\n}\n\n\/\/\n\/\/ Watch for host changes and validate as needed.\ntype HostEventHandler struct {\n\tlibmodel.StockEventHandler\n\t\/\/ DB.\n\tDB libmodel.DB\n\t\/\/ Logger.\n\tlog logr.Logger\n}\n\n\/\/\n\/\/ Host updated.\n\/\/ Analyze all related VMs.\nfunc (r 
*HostEventHandler) Updated(event libmodel.Event) {\n\thost, cast := event.Model.(*model.Host)\n\tif cast {\n\t\tr.validate(host)\n\t}\n}\n\n\/\/\n\/\/ Report errors.\nfunc (r *HostEventHandler) Error(err error) {\n\tr.log.Error(liberr.Wrap(err), err.Error())\n}\n\n\/\/\n\/\/ Analyze all of the VMs related to the host.\nfunc (r *HostEventHandler) validate(host *model.Host) {\n\ttx, err := r.DB.Begin()\n\tif err != nil {\n\t\tr.log.Error(err, \"begin tx failed.\")\n\t\treturn\n\t}\n\tdefer func() {\n\t\t_ = tx.End()\n\t}()\n\tlist := []model.VM{}\n\terr = tx.List(\n\t\t&list,\n\t\tmodel.ListOptions{\n\t\t\tPredicate: libmodel.Eq(\"host\", host.ID),\n\t\t})\n\tif err != nil {\n\t\tr.log.Error(err, \"list VM failed.\")\n\t\treturn\n\t}\n\tfor _, vm := range list {\n\t\tvm.RevisionValidated = 0\n\t\terr = tx.Update(&vm)\n\t\tif err != nil {\n\t\t\tr.log.Error(err, \"VM (update) failed.\")\n\t\t\treturn\n\t\t}\n\t}\n\terr = tx.Commit()\n\tif err != nil {\n\t\tr.log.Error(err, \"Tx commit failed.\")\n\t\treturn\n\t}\n}\n\n\/\/\n\/\/ Watch for NICProfile changes and validate VMs as needed.\ntype NICProfileHandler struct {\n\tlibmodel.StockEventHandler\n\t\/\/ DB.\n\tDB libmodel.DB\n\t\/\/ Logger.\n\tlog logr.Logger\n}\n\n\/\/\n\/\/ Profile updated.\n\/\/ Analyze all referencing VMs.\nfunc (r *NICProfileHandler) Updated(event libmodel.Event) {\n\tprofile, cast := event.Model.(*model.NICProfile)\n\tif cast {\n\t\tr.validate(profile)\n\t}\n}\n\n\/\/\n\/\/ Report errors.\nfunc (r *NICProfileHandler) Error(err error) {\n\tr.log.Error(liberr.Wrap(err), err.Error())\n}\n\n\/\/\n\/\/ Analyze all of the VMs with NICs referencing the profile.\nfunc (r *NICProfileHandler) validate(profile *model.NICProfile) {\n\ttx, err := r.DB.Begin()\n\tif err != nil {\n\t\tr.log.Error(err, \"begin tx failed.\")\n\t\treturn\n\t}\n\tdefer func() {\n\t\t_ = tx.End()\n\t}()\n\titr, err := tx.Find(\n\t\t&model.VM{},\n\t\tmodel.ListOptions{\n\t\t\tDetail: model.MaxDetail,\n\t\t})\n\tif err != nil {\n\t\tr.log.Error(err, \"list VM failed.\")\n\t\treturn\n\t}\n\tfor {\n\t\tvm := &model.VM{}\n\t\thasNext := itr.NextWith(vm)\n\t\tif !hasNext {\n\t\t\tbreak\n\t\t}\n\t\tfor _, nic := range vm.NICs {\n\t\t\tif nic.Profile != profile.ID {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvm.RevisionValidated = 0\n\t\t\terr = tx.Update(vm)\n\t\t\tif err != nil {\n\t\t\t\tr.log.Error(err, \"VM (update) failed.\")\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\terr = tx.Commit()\n\tif err != nil {\n\t\tr.log.Error(err, \"Tx commit failed.\")\n\t\treturn\n\t}\n}\n\n\/\/\n\/\/ Watch for DiskProfile changes and validate VMs as needed.\ntype DiskProfileHandler struct {\n\tlibmodel.StockEventHandler\n\t\/\/ DB.\n\tDB libmodel.DB\n\t\/\/ Logger.\n\tlog logr.Logger\n}\n\n\/\/\n\/\/ Profile updated.\n\/\/ Analyze all referencing VMs.\nfunc (r *DiskProfileHandler) Updated(event libmodel.Event) {\n\tprofile, cast := event.Model.(*model.DiskProfile)\n\tif cast {\n\t\tr.validate(profile)\n\t}\n}\n\n\/\/\n\/\/ Report errors.\nfunc (r *DiskProfileHandler) Error(err error) {\n\tr.log.Error(liberr.Wrap(err), err.Error())\n}\n\n\/\/\n\/\/ Analyze all of the VMs with disks referencing the profile.\nfunc (r *DiskProfileHandler) validate(profile *model.DiskProfile) {\n\ttx, err := r.DB.Begin()\n\tif err != nil {\n\t\tr.log.Error(err, \"begin tx failed.\")\n\t\treturn\n\t}\n\tdefer func() {\n\t\t_ = tx.End()\n\t}()\n\taffectedDisks := map[string]bool{}\n\titr, err := tx.Find(\n\t\t&model.Disk{},\n\t\tmodel.ListOptions{\n\t\t\tDetail: model.MaxDetail,\n\t\t})\n\tif err != nil 
{\n\t\tr.log.Error(err, \"list Disk failed.\")\n\t\treturn\n\t}\n\tfor {\n\t\tdisk := &model.Disk{}\n\t\thasNext := itr.NextWith(disk)\n\t\tif hasNext {\n\t\t\tif disk.Profile == profile.ID {\n\t\t\t\taffectedDisks[disk.ID] = true\n\t\t\t}\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\titr, err = tx.Find(\n\t\t&model.VM{},\n\t\tmodel.ListOptions{\n\t\t\tDetail: model.MaxDetail,\n\t\t})\n\tif err != nil {\n\t\tr.log.Error(err, \"list VM failed.\")\n\t\treturn\n\t}\n\tfor {\n\t\tvm := &model.VM{}\n\t\thasNext := itr.NextWith(vm)\n\t\tif !hasNext {\n\t\t\tbreak\n\t\t}\n\t\tfor _, da := range vm.DiskAttachments {\n\t\t\tif _, affected := affectedDisks[da.Disk]; !affected {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvm.RevisionValidated = 0\n\t\t\terr = tx.Update(vm)\n\t\t\tif err != nil {\n\t\t\t\tr.log.Error(err, \"VM (update) failed.\")\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\terr = tx.Commit()\n\tif err != nil {\n\t\tr.log.Error(err, \"Tx commit failed.\")\n\t\treturn\n\t}\n}\n","avg_line_length":20.616892911,"max_line_length":84,"alphanum_fraction":0.6539615188} +{"size":71027,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package web\n\n\/\/ Copyright (c) Microsoft and contributors. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\/\/\n\/\/ Code generated by Microsoft (R) AutoRest Code Generator.\n\/\/ Changes may cause incorrect behavior and will be lost if the code is regenerated.\n\nimport (\n\t\"context\"\n\t\"github.com\/Azure\/go-autorest\/autorest\"\n\t\"github.com\/Azure\/go-autorest\/autorest\/azure\"\n\t\"github.com\/Azure\/go-autorest\/autorest\/validation\"\n\t\"github.com\/Azure\/go-autorest\/tracing\"\n\t\"net\/http\"\n)\n\n\/\/ RecommendationsClient is the webSite Management Client\ntype RecommendationsClient struct {\n\tBaseClient\n}\n\n\/\/ NewRecommendationsClient creates an instance of the RecommendationsClient client.\nfunc NewRecommendationsClient(subscriptionID string) RecommendationsClient {\n\treturn NewRecommendationsClientWithBaseURI(DefaultBaseURI, subscriptionID)\n}\n\n\/\/ NewRecommendationsClientWithBaseURI creates an instance of the RecommendationsClient client using a custom endpoint.\n\/\/ Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).\nfunc NewRecommendationsClientWithBaseURI(baseURI string, subscriptionID string) RecommendationsClient {\n\treturn RecommendationsClient{NewWithBaseURI(baseURI, subscriptionID)}\n}\n\n\/\/ DisableAllForHostingEnvironment description for Disable all recommendations for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ environmentName - name of the app.\nfunc (client RecommendationsClient) DisableAllForHostingEnvironment(ctx context.Context, resourceGroupName string, environmentName string, hostingEnvironmentName string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, 
fqdn+\"\/RecommendationsClient.DisableAllForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"DisableAllForHostingEnvironment\", err.Error())\n\t}\n\n\treq, err := client.DisableAllForHostingEnvironmentPreparer(ctx, resourceGroupName, environmentName, hostingEnvironmentName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.DisableAllForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DisableAllForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ DisableAllForHostingEnvironmentPreparer prepares the DisableAllForHostingEnvironment request.\nfunc (client RecommendationsClient) DisableAllForHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, environmentName string, hostingEnvironmentName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t\t\"environmentName\": autorest.Encode(\"query\", environmentName),\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendations\/disable\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DisableAllForHostingEnvironmentSender sends the DisableAllForHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) DisableAllForHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ DisableAllForHostingEnvironmentResponder handles the response to the DisableAllForHostingEnvironment request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) DisableAllForHostingEnvironmentResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ DisableAllForWebApp description for Disable all recommendations for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - name of the app.\nfunc (client RecommendationsClient) DisableAllForWebApp(ctx context.Context, resourceGroupName string, siteName string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.DisableAllForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"DisableAllForWebApp\", err.Error())\n\t}\n\n\treq, err := client.DisableAllForWebAppPreparer(ctx, resourceGroupName, siteName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForWebApp\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.DisableAllForWebAppSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForWebApp\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DisableAllForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableAllForWebApp\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ DisableAllForWebAppPreparer prepares the DisableAllForWebApp request.\nfunc (client RecommendationsClient) DisableAllForWebAppPreparer(ctx context.Context, resourceGroupName string, siteName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendations\/disable\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DisableAllForWebAppSender sends the DisableAllForWebApp request. 
The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) DisableAllForWebAppSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ DisableAllForWebAppResponder handles the response to the DisableAllForWebApp request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) DisableAllForWebAppResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ DisableRecommendationForHostingEnvironment description for Disables the specific rule for a web site permanently.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ environmentName - site name\n\/\/ name - rule name\nfunc (client RecommendationsClient) DisableRecommendationForHostingEnvironment(ctx context.Context, resourceGroupName string, environmentName string, name string, hostingEnvironmentName string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.DisableRecommendationForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"DisableRecommendationForHostingEnvironment\", err.Error())\n\t}\n\n\treq, err := client.DisableRecommendationForHostingEnvironmentPreparer(ctx, resourceGroupName, environmentName, name, hostingEnvironmentName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.DisableRecommendationForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DisableRecommendationForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ DisableRecommendationForHostingEnvironmentPreparer prepares the DisableRecommendationForHostingEnvironment request.\nfunc (client RecommendationsClient) DisableRecommendationForHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, environmentName string, name string, hostingEnvironmentName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"name\": 
autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t\t\"environmentName\": autorest.Encode(\"query\", environmentName),\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendations\/{name}\/disable\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DisableRecommendationForHostingEnvironmentSender sends the DisableRecommendationForHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) DisableRecommendationForHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ DisableRecommendationForHostingEnvironmentResponder handles the response to the DisableRecommendationForHostingEnvironment request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) DisableRecommendationForHostingEnvironmentResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ DisableRecommendationForSite description for Disables the specific rule for a web site permanently.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - site name\n\/\/ name - rule name\nfunc (client RecommendationsClient) DisableRecommendationForSite(ctx context.Context, resourceGroupName string, siteName string, name string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.DisableRecommendationForSite\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"DisableRecommendationForSite\", err.Error())\n\t}\n\n\treq, err := client.DisableRecommendationForSitePreparer(ctx, resourceGroupName, siteName, name)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForSite\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.DisableRecommendationForSiteSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", 
\"DisableRecommendationForSite\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DisableRecommendationForSiteResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForSite\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ DisableRecommendationForSitePreparer prepares the DisableRecommendationForSite request.\nfunc (client RecommendationsClient) DisableRecommendationForSitePreparer(ctx context.Context, resourceGroupName string, siteName string, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendations\/{name}\/disable\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DisableRecommendationForSiteSender sends the DisableRecommendationForSite request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) DisableRecommendationForSiteSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ DisableRecommendationForSiteResponder handles the response to the DisableRecommendationForSite request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) DisableRecommendationForSiteResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ DisableRecommendationForSubscription description for Disables the specified rule so it will not apply to a\n\/\/ subscription in the future.\n\/\/ Parameters:\n\/\/ name - rule name\nfunc (client RecommendationsClient) DisableRecommendationForSubscription(ctx context.Context, name string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.DisableRecommendationForSubscription\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.DisableRecommendationForSubscriptionPreparer(ctx, name)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForSubscription\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.DisableRecommendationForSubscriptionSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForSubscription\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.DisableRecommendationForSubscriptionResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"DisableRecommendationForSubscription\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ DisableRecommendationForSubscriptionPreparer prepares the DisableRecommendationForSubscription request.\nfunc (client RecommendationsClient) DisableRecommendationForSubscriptionPreparer(ctx context.Context, name string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/providers\/Microsoft.Web\/recommendations\/{name}\/disable\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ DisableRecommendationForSubscriptionSender sends the DisableRecommendationForSubscription request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) DisableRecommendationForSubscriptionSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ DisableRecommendationForSubscriptionResponder handles the response to the DisableRecommendationForSubscription request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) DisableRecommendationForSubscriptionResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ GetRuleDetailsByHostingEnvironment description for Get a recommendation rule for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ hostingEnvironmentName - name of the hosting environment.\n\/\/ name - name of the recommendation.\n\/\/ updateSeen - specify true<\/code> to update the last-seen timestamp of the recommendation object.\n\/\/ recommendationID - the GUID of the recommendation object if you query an expired one. You don't need to\n\/\/ specify it to query an active entry.\nfunc (client RecommendationsClient) GetRuleDetailsByHostingEnvironment(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, name string, updateSeen *bool, recommendationID string) (result RecommendationRule, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.GetRuleDetailsByHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response.Response != nil {\n\t\t\t\tsc = result.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"GetRuleDetailsByHostingEnvironment\", err.Error())\n\t}\n\n\treq, err := client.GetRuleDetailsByHostingEnvironmentPreparer(ctx, resourceGroupName, hostingEnvironmentName, name, updateSeen, recommendationID)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.GetRuleDetailsByHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.GetRuleDetailsByHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ GetRuleDetailsByHostingEnvironmentPreparer prepares the GetRuleDetailsByHostingEnvironment request.\nfunc (client RecommendationsClient) GetRuleDetailsByHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, name string, updateSeen *bool, recommendationID string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": 
autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif updateSeen != nil {\n\t\tqueryParameters[\"updateSeen\"] = autorest.Encode(\"query\", *updateSeen)\n\t}\n\tif len(recommendationID) > 0 {\n\t\tqueryParameters[\"recommendationId\"] = autorest.Encode(\"query\", recommendationID)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendations\/{name}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ GetRuleDetailsByHostingEnvironmentSender sends the GetRuleDetailsByHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) GetRuleDetailsByHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ GetRuleDetailsByHostingEnvironmentResponder handles the response to the GetRuleDetailsByHostingEnvironment request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) GetRuleDetailsByHostingEnvironmentResponder(resp *http.Response) (result RecommendationRule, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ GetRuleDetailsByWebApp description for Get a recommendation rule for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - name of the app.\n\/\/ name - name of the recommendation.\n\/\/ updateSeen - specify true<\/code> to update the last-seen timestamp of the recommendation object.\n\/\/ recommendationID - the GUID of the recommendation object if you query an expired one. 
You don't need to\n\/\/ specify it to query an active entry.\nfunc (client RecommendationsClient) GetRuleDetailsByWebApp(ctx context.Context, resourceGroupName string, siteName string, name string, updateSeen *bool, recommendationID string) (result RecommendationRule, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.GetRuleDetailsByWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response.Response != nil {\n\t\t\t\tsc = result.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"GetRuleDetailsByWebApp\", err.Error())\n\t}\n\n\treq, err := client.GetRuleDetailsByWebAppPreparer(ctx, resourceGroupName, siteName, name, updateSeen, recommendationID)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByWebApp\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.GetRuleDetailsByWebAppSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByWebApp\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.GetRuleDetailsByWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"GetRuleDetailsByWebApp\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ GetRuleDetailsByWebAppPreparer prepares the GetRuleDetailsByWebApp request.\nfunc (client RecommendationsClient) GetRuleDetailsByWebAppPreparer(ctx context.Context, resourceGroupName string, siteName string, name string, updateSeen *bool, recommendationID string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"name\": autorest.Encode(\"path\", name),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif updateSeen != nil {\n\t\tqueryParameters[\"updateSeen\"] = autorest.Encode(\"query\", *updateSeen)\n\t}\n\tif len(recommendationID) > 0 {\n\t\tqueryParameters[\"recommendationId\"] = autorest.Encode(\"query\", recommendationID)\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendations\/{name}\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ GetRuleDetailsByWebAppSender sends the GetRuleDetailsByWebApp request. 
The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) GetRuleDetailsByWebAppSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ GetRuleDetailsByWebAppResponder handles the response to the GetRuleDetailsByWebApp request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) GetRuleDetailsByWebAppResponder(resp *http.Response) (result RecommendationRule, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ List description for List all recommendations for a subscription.\n\/\/ Parameters:\n\/\/ featured - specify true<\/code> to return only the most critical recommendations. The default is\n\/\/ false<\/code>, which returns all recommendations.\n\/\/ filter - filter is specified by using OData syntax. Example: $filter=channel eq 'Api' or channel eq\n\/\/ 'Notification' and startTime eq 2014-01-01T00:00:00Z and endTime eq 2014-12-31T23:59:59Z and timeGrain eq\n\/\/ duration'[PT1H|PT1M|P1D]\nfunc (client RecommendationsClient) List(ctx context.Context, featured *bool, filter string) (result RecommendationCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.List\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.rc.Response.Response != nil {\n\t\t\t\tsc = result.rc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listNextResults\n\treq, err := client.ListPreparer(ctx, featured, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"List\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListSender(req)\n\tif err != nil {\n\t\tresult.rc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"List\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.rc, err = client.ListResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"List\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ListPreparer prepares the List request.\nfunc (client RecommendationsClient) ListPreparer(ctx context.Context, featured *bool, filter string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif featured != nil {\n\t\tqueryParameters[\"featured\"] = autorest.Encode(\"query\", *featured)\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = filter\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/providers\/Microsoft.Web\/recommendations\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListSender sends the List request. 
The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ListSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListResponder handles the response to the List request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ListResponder(resp *http.Response) (result RecommendationCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listNextResults retrieves the next set of results, if any.\nfunc (client RecommendationsClient) listNextResults(ctx context.Context, lastResults RecommendationCollection) (result RecommendationCollection, err error) {\n\treq, err := lastResults.recommendationCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client RecommendationsClient) ListComplete(ctx context.Context, featured *bool, filter string) (result RecommendationCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.List\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.List(ctx, featured, filter)\n\treturn\n}\n\n\/\/ ListHistoryForHostingEnvironment description for Get past recommendations for an app, optionally specified by the\n\/\/ time range.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ hostingEnvironmentName - name of the hosting environment.\n\/\/ expiredOnly - specify false<\/code> to return all recommendations. The default is true<\/code>,\n\/\/ which returns only expired recommendations.\n\/\/ filter - filter is specified by using OData syntax. 
Example: $filter=channel eq 'Api' or channel eq\n\/\/ 'Notification' and startTime eq 2014-01-01T00:00:00Z and endTime eq 2014-12-31T23:59:59Z and timeGrain eq\n\/\/ duration'[PT1H|PT1M|P1D]\nfunc (client RecommendationsClient) ListHistoryForHostingEnvironment(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, expiredOnly *bool, filter string) (result RecommendationCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListHistoryForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.rc.Response.Response != nil {\n\t\t\t\tsc = result.rc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ListHistoryForHostingEnvironment\", err.Error())\n\t}\n\n\tresult.fn = client.listHistoryForHostingEnvironmentNextResults\n\treq, err := client.ListHistoryForHostingEnvironmentPreparer(ctx, resourceGroupName, hostingEnvironmentName, expiredOnly, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListHistoryForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.rc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.rc, err = client.ListHistoryForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ListHistoryForHostingEnvironmentPreparer prepares the ListHistoryForHostingEnvironment request.\nfunc (client RecommendationsClient) ListHistoryForHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, expiredOnly *bool, filter string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif expiredOnly != nil {\n\t\tqueryParameters[\"expiredOnly\"] = autorest.Encode(\"query\", *expiredOnly)\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = filter\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendationHistory\", 
pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListHistoryForHostingEnvironmentSender sends the ListHistoryForHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ListHistoryForHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListHistoryForHostingEnvironmentResponder handles the response to the ListHistoryForHostingEnvironment request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ListHistoryForHostingEnvironmentResponder(resp *http.Response) (result RecommendationCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listHistoryForHostingEnvironmentNextResults retrieves the next set of results, if any.\nfunc (client RecommendationsClient) listHistoryForHostingEnvironmentNextResults(ctx context.Context, lastResults RecommendationCollection) (result RecommendationCollection, err error) {\n\treq, err := lastResults.recommendationCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForHostingEnvironmentNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListHistoryForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForHostingEnvironmentNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListHistoryForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForHostingEnvironmentNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListHistoryForHostingEnvironmentComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client RecommendationsClient) ListHistoryForHostingEnvironmentComplete(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, expiredOnly *bool, filter string) (result RecommendationCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListHistoryForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListHistoryForHostingEnvironment(ctx, resourceGroupName, hostingEnvironmentName, expiredOnly, filter)\n\treturn\n}\n\n\/\/ ListHistoryForWebApp description for Get past recommendations for an app, optionally specified by the time range.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - name of the app.\n\/\/ expiredOnly - specify false<\/code> to return all recommendations. 
The default is true<\/code>,\n\/\/ which returns only expired recommendations.\n\/\/ filter - filter is specified by using OData syntax. Example: $filter=channel eq 'Api' or channel eq\n\/\/ 'Notification' and startTime eq 2014-01-01T00:00:00Z and endTime eq 2014-12-31T23:59:59Z and timeGrain eq\n\/\/ duration'[PT1H|PT1M|P1D]\nfunc (client RecommendationsClient) ListHistoryForWebApp(ctx context.Context, resourceGroupName string, siteName string, expiredOnly *bool, filter string) (result RecommendationCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListHistoryForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.rc.Response.Response != nil {\n\t\t\t\tsc = result.rc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ListHistoryForWebApp\", err.Error())\n\t}\n\n\tresult.fn = client.listHistoryForWebAppNextResults\n\treq, err := client.ListHistoryForWebAppPreparer(ctx, resourceGroupName, siteName, expiredOnly, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForWebApp\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListHistoryForWebAppSender(req)\n\tif err != nil {\n\t\tresult.rc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForWebApp\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.rc, err = client.ListHistoryForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListHistoryForWebApp\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ListHistoryForWebAppPreparer prepares the ListHistoryForWebApp request.\nfunc (client RecommendationsClient) ListHistoryForWebAppPreparer(ctx context.Context, resourceGroupName string, siteName string, expiredOnly *bool, filter string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif expiredOnly != nil {\n\t\tqueryParameters[\"expiredOnly\"] = autorest.Encode(\"query\", *expiredOnly)\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = filter\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendationHistory\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ 
ListHistoryForWebAppSender sends the ListHistoryForWebApp request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ListHistoryForWebAppSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListHistoryForWebAppResponder handles the response to the ListHistoryForWebApp request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ListHistoryForWebAppResponder(resp *http.Response) (result RecommendationCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listHistoryForWebAppNextResults retrieves the next set of results, if any.\nfunc (client RecommendationsClient) listHistoryForWebAppNextResults(ctx context.Context, lastResults RecommendationCollection) (result RecommendationCollection, err error) {\n\treq, err := lastResults.recommendationCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForWebAppNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListHistoryForWebAppSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForWebAppNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListHistoryForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listHistoryForWebAppNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListHistoryForWebAppComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client RecommendationsClient) ListHistoryForWebAppComplete(ctx context.Context, resourceGroupName string, siteName string, expiredOnly *bool, filter string) (result RecommendationCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListHistoryForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListHistoryForWebApp(ctx, resourceGroupName, siteName, expiredOnly, filter)\n\treturn\n}\n\n\/\/ ListRecommendedRulesForHostingEnvironment description for Get all recommendations for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ hostingEnvironmentName - name of the app.\n\/\/ featured - specify true<\/code> to return only the most critical recommendations. The default is\n\/\/ false<\/code>, which returns all recommendations.\n\/\/ filter - return only channels specified in the filter. Filter is specified by using OData syntax. 
Example:\n\/\/ $filter=channel eq 'Api' or channel eq 'Notification'\nfunc (client RecommendationsClient) ListRecommendedRulesForHostingEnvironment(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, featured *bool, filter string) (result RecommendationCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListRecommendedRulesForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.rc.Response.Response != nil {\n\t\t\t\tsc = result.rc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ListRecommendedRulesForHostingEnvironment\", err.Error())\n\t}\n\n\tresult.fn = client.listRecommendedRulesForHostingEnvironmentNextResults\n\treq, err := client.ListRecommendedRulesForHostingEnvironmentPreparer(ctx, resourceGroupName, hostingEnvironmentName, featured, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListRecommendedRulesForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.rc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.rc, err = client.ListRecommendedRulesForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ListRecommendedRulesForHostingEnvironmentPreparer prepares the ListRecommendedRulesForHostingEnvironment request.\nfunc (client RecommendationsClient) ListRecommendedRulesForHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, featured *bool, filter string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif featured != nil {\n\t\tqueryParameters[\"featured\"] = autorest.Encode(\"query\", *featured)\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = filter\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendations\", 
pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListRecommendedRulesForHostingEnvironmentSender sends the ListRecommendedRulesForHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ListRecommendedRulesForHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListRecommendedRulesForHostingEnvironmentResponder handles the response to the ListRecommendedRulesForHostingEnvironment request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ListRecommendedRulesForHostingEnvironmentResponder(resp *http.Response) (result RecommendationCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listRecommendedRulesForHostingEnvironmentNextResults retrieves the next set of results, if any.\nfunc (client RecommendationsClient) listRecommendedRulesForHostingEnvironmentNextResults(ctx context.Context, lastResults RecommendationCollection) (result RecommendationCollection, err error) {\n\treq, err := lastResults.recommendationCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForHostingEnvironmentNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListRecommendedRulesForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForHostingEnvironmentNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListRecommendedRulesForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForHostingEnvironmentNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListRecommendedRulesForHostingEnvironmentComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client RecommendationsClient) ListRecommendedRulesForHostingEnvironmentComplete(ctx context.Context, resourceGroupName string, hostingEnvironmentName string, featured *bool, filter string) (result RecommendationCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListRecommendedRulesForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListRecommendedRulesForHostingEnvironment(ctx, resourceGroupName, hostingEnvironmentName, featured, filter)\n\treturn\n}\n\n\/\/ ListRecommendedRulesForWebApp description for Get all recommendations for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - name of the app.\n\/\/ featured - specify true<\/code> to return only the most 
critical recommendations. The default is\n\/\/ false<\/code>, which returns all recommendations.\n\/\/ filter - return only channels specified in the filter. Filter is specified by using OData syntax. Example:\n\/\/ $filter=channel eq 'Api' or channel eq 'Notification'\nfunc (client RecommendationsClient) ListRecommendedRulesForWebApp(ctx context.Context, resourceGroupName string, siteName string, featured *bool, filter string) (result RecommendationCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListRecommendedRulesForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.rc.Response.Response != nil {\n\t\t\t\tsc = result.rc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ListRecommendedRulesForWebApp\", err.Error())\n\t}\n\n\tresult.fn = client.listRecommendedRulesForWebAppNextResults\n\treq, err := client.ListRecommendedRulesForWebAppPreparer(ctx, resourceGroupName, siteName, featured, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForWebApp\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListRecommendedRulesForWebAppSender(req)\n\tif err != nil {\n\t\tresult.rc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForWebApp\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.rc, err = client.ListRecommendedRulesForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ListRecommendedRulesForWebApp\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ListRecommendedRulesForWebAppPreparer prepares the ListRecommendedRulesForWebApp request.\nfunc (client RecommendationsClient) ListRecommendedRulesForWebAppPreparer(ctx context.Context, resourceGroupName string, siteName string, featured *bool, filter string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\tif featured != nil {\n\t\tqueryParameters[\"featured\"] = autorest.Encode(\"query\", *featured)\n\t}\n\tif len(filter) > 0 {\n\t\tqueryParameters[\"$filter\"] = filter\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendations\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn 
preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ListRecommendedRulesForWebAppSender sends the ListRecommendedRulesForWebApp request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ListRecommendedRulesForWebAppSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ListRecommendedRulesForWebAppResponder handles the response to the ListRecommendedRulesForWebApp request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ListRecommendedRulesForWebAppResponder(resp *http.Response) (result RecommendationCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}\n\n\/\/ listRecommendedRulesForWebAppNextResults retrieves the next set of results, if any.\nfunc (client RecommendationsClient) listRecommendedRulesForWebAppNextResults(ctx context.Context, lastResults RecommendationCollection) (result RecommendationCollection, err error) {\n\treq, err := lastResults.recommendationCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForWebAppNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListRecommendedRulesForWebAppSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForWebAppNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListRecommendedRulesForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"listRecommendedRulesForWebAppNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}\n\n\/\/ ListRecommendedRulesForWebAppComplete enumerates all values, automatically crossing page boundaries as required.\nfunc (client RecommendationsClient) ListRecommendedRulesForWebAppComplete(ctx context.Context, resourceGroupName string, siteName string, featured *bool, filter string) (result RecommendationCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ListRecommendedRulesForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListRecommendedRulesForWebApp(ctx, resourceGroupName, siteName, featured, filter)\n\treturn\n}\n\n\/\/ ResetAllFilters description for Reset all recommendation opt-out settings for a subscription.\nfunc (client RecommendationsClient) ResetAllFilters(ctx context.Context) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ResetAllFilters\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\treq, err := client.ResetAllFiltersPreparer(ctx)\n\tif err != nil {\n\t\terr = 
autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFilters\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ResetAllFiltersSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFilters\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.ResetAllFiltersResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFilters\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ResetAllFiltersPreparer prepares the ResetAllFilters request.\nfunc (client RecommendationsClient) ResetAllFiltersPreparer(ctx context.Context) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/providers\/Microsoft.Web\/recommendations\/reset\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ResetAllFiltersSender sends the ResetAllFilters request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ResetAllFiltersSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ResetAllFiltersResponder handles the response to the ResetAllFilters request. 
The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ResetAllFiltersResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ ResetAllFiltersForHostingEnvironment description for Reset all recommendation opt-out settings for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ environmentName - name of the app.\nfunc (client RecommendationsClient) ResetAllFiltersForHostingEnvironment(ctx context.Context, resourceGroupName string, environmentName string, hostingEnvironmentName string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ResetAllFiltersForHostingEnvironment\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ResetAllFiltersForHostingEnvironment\", err.Error())\n\t}\n\n\treq, err := client.ResetAllFiltersForHostingEnvironmentPreparer(ctx, resourceGroupName, environmentName, hostingEnvironmentName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForHostingEnvironment\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ResetAllFiltersForHostingEnvironmentSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForHostingEnvironment\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.ResetAllFiltersForHostingEnvironmentResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForHostingEnvironment\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ResetAllFiltersForHostingEnvironmentPreparer prepares the ResetAllFiltersForHostingEnvironment request.\nfunc (client RecommendationsClient) ResetAllFiltersForHostingEnvironmentPreparer(ctx context.Context, resourceGroupName string, environmentName string, hostingEnvironmentName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"hostingEnvironmentName\": autorest.Encode(\"path\", hostingEnvironmentName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t\t\"environmentName\": autorest.Encode(\"query\", environmentName),\n\t}\n\n\tpreparer := 
autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/hostingEnvironments\/{hostingEnvironmentName}\/recommendations\/reset\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ResetAllFiltersForHostingEnvironmentSender sends the ResetAllFiltersForHostingEnvironment request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ResetAllFiltersForHostingEnvironmentSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ResetAllFiltersForHostingEnvironmentResponder handles the response to the ResetAllFiltersForHostingEnvironment request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ResetAllFiltersForHostingEnvironmentResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n\n\/\/ ResetAllFiltersForWebApp description for Reset all recommendation opt-out settings for an app.\n\/\/ Parameters:\n\/\/ resourceGroupName - name of the resource group to which the resource belongs.\n\/\/ siteName - name of the app.\nfunc (client RecommendationsClient) ResetAllFiltersForWebApp(ctx context.Context, resourceGroupName string, siteName string) (result autorest.Response, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"\/RecommendationsClient.ResetAllFiltersForWebApp\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response != nil {\n\t\t\t\tsc = result.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tif err := validation.Validate([]validation.Validation{\n\t\t{TargetValue: resourceGroupName,\n\t\t\tConstraints: []validation.Constraint{{Target: \"resourceGroupName\", Name: validation.MaxLength, Rule: 90, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.MinLength, Rule: 1, Chain: nil},\n\t\t\t\t{Target: \"resourceGroupName\", Name: validation.Pattern, Rule: `^[-\\w\\._\\(\\)]+[^\\.]$`, Chain: nil}}}}); err != nil {\n\t\treturn result, validation.NewError(\"web.RecommendationsClient\", \"ResetAllFiltersForWebApp\", err.Error())\n\t}\n\n\treq, err := client.ResetAllFiltersForWebAppPreparer(ctx, resourceGroupName, siteName)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForWebApp\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ResetAllFiltersForWebAppSender(req)\n\tif err != nil {\n\t\tresult.Response = resp\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForWebApp\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult, err = client.ResetAllFiltersForWebAppResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"web.RecommendationsClient\", \"ResetAllFiltersForWebApp\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}\n\n\/\/ ResetAllFiltersForWebAppPreparer prepares the ResetAllFiltersForWebApp request.\nfunc (client RecommendationsClient) ResetAllFiltersForWebAppPreparer(ctx context.Context, 
resourceGroupName string, siteName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"siteName\": autorest.Encode(\"path\", siteName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2019-08-01\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"\/subscriptions\/{subscriptionId}\/resourceGroups\/{resourceGroupName}\/providers\/Microsoft.Web\/sites\/{siteName}\/recommendations\/reset\", pathParameters),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}\n\n\/\/ ResetAllFiltersForWebAppSender sends the ResetAllFiltersForWebApp request. The method will close the\n\/\/ http.Response Body if it receives an error.\nfunc (client RecommendationsClient) ResetAllFiltersForWebAppSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}\n\n\/\/ ResetAllFiltersForWebAppResponder handles the response to the ResetAllFiltersForWebApp request. The method always\n\/\/ closes the http.Response Body.\nfunc (client RecommendationsClient) ResetAllFiltersForWebAppResponder(resp *http.Response) (result autorest.Response, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),\n\t\tautorest.ByClosing())\n\tresult.Response = resp\n\treturn\n}\n","avg_line_length":46.7282894737,"max_line_length":249,"alphanum_fraction":0.7760006758} +{"size":2027,"ext":"go","lang":"Go","max_stars_count":13.0,"content":"\/\/ Code generated by go-swagger; DO NOT EDIT.\n\npackage direct_debits\n\n\/\/ This file was generated by the swagger tool.\n\/\/ Editing this file might prove futile when you re-run the swagger generate command\n\nimport (\n\t\"fmt\"\n\t\"io\"\n\n\t\"github.com\/go-openapi\/runtime\"\n\n\tstrfmt \"github.com\/go-openapi\/strfmt\"\n\n\t\"github.com\/form3tech-oss\/go-form3\/v3\/pkg\/generated\/models\"\n)\n\n\/\/ GetDirectDebitAdmissionReader is a Reader for the GetDirectDebitAdmission structure.\ntype GetDirectDebitAdmissionReader struct {\n\tformats strfmt.Registry\n}\n\n\/\/ ReadResponse reads a server response into the received o.\nfunc (o *GetDirectDebitAdmissionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetDirectDebitAdmissionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}\n\n\/\/ NewGetDirectDebitAdmissionOK creates a GetDirectDebitAdmissionOK with default headers values\nfunc NewGetDirectDebitAdmissionOK() *GetDirectDebitAdmissionOK {\n\treturn &GetDirectDebitAdmissionOK{}\n}\n\n\/*GetDirectDebitAdmissionOK handles this case with default header values.\n\nDirect Debit Admission details\n*\/\ntype GetDirectDebitAdmissionOK struct {\n\n\t\/\/Payload\n\n\t\/\/ isStream: false\n\t*models.DirectDebitAdmissionDetailsResponse\n}\n\nfunc (o *GetDirectDebitAdmissionOK) Error() string {\n\treturn fmt.Sprintf(\"[GET 
\/transaction\/directdebits\/{id}\/admissions\/{admissionId}][%d] getDirectDebitAdmissionOK\", 200)\n}\n\nfunc (o *GetDirectDebitAdmissionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {\n\n\to.DirectDebitAdmissionDetailsResponse = new(models.DirectDebitAdmissionDetailsResponse)\n\n\t\/\/ response payload\n\n\tif err := consumer.Consume(response.Body(), o.DirectDebitAdmissionDetailsResponse); err != nil && err != io.EOF {\n\t\treturn err\n\t}\n\n\treturn nil\n}\n","avg_line_length":27.7671232877,"max_line_length":141,"alphanum_fraction":0.7799703996} +{"size":1734,"ext":"go","lang":"Go","max_stars_count":9.0,"content":"\/\/ Copyright Suneido Software Corp. All rights reserved.\n\/\/ Governed by the MIT license found in the LICENSE file.\n\npackage mux\n\nimport \"github.com\/apmckinlay\/gsuneido\/util\/hacks\"\n\nconst bufSize = 4 * 1024\n\n\/\/ writeBuffer is used to combine small writes\ntype writeBuffer struct {\n\t*conn\n\tbuf []byte\n}\n\nfunc newWriteBuffer(c *conn) *writeBuffer {\n\treturn &writeBuffer{conn: c, buf: make([]byte, HeaderSize, bufSize)}\n}\n\n\/\/ space returns the amount of space remaining in the buffer\nfunc (b *writeBuffer) space() int {\n\treturn bufSize - len(b.buf)\n}\n\n\/\/ Write writes part of a message. If it is small it will be buffered.\n\/\/ final should be true for the last write of a message.\nfunc (b *writeBuffer) Write(id int, data []byte, final bool) {\n\tif len(data) > b.space() {\n\t\tb.flush(id, false)\n\t}\n\tif len(data) >= bufSize {\n\t\tb.conn.write(id, data, false, final)\n\t} else {\n\t\tb.buf = append(b.buf, data...)\n\t\tif final {\n\t\t\tb.flush(id, true)\n\t\t}\n\t}\n}\n\n\/\/ WriteString is like Write, but for a string.\nfunc (b *writeBuffer) WriteString(id int, data string, final bool) {\n\tif len(data) > b.space() {\n\t\tb.flush(id, false)\n\t}\n\tif len(data) >= bufSize {\n\t\t\/\/ it would be safer\/better to use []byte(s)\n\t\t\/\/ but strings are used for large data so we want to avoid copying\n\t\tb.conn.write(id, hacks.Stobs(data), false, final)\n\t} else {\n\t\tb.buf = append(b.buf, data...)\n\t\tif final {\n\t\t\tb.flush(id, true)\n\t\t}\n\t}\n}\n\n\/\/ WriteByte is like Write, but for a byte.\nfunc (b *writeBuffer) WriteByte(id int, data byte, final bool) {\n\tif b.space() == 0 {\n\t\tb.flush(id, false)\n\t}\n\tb.buf = append(b.buf, data)\n\tif final {\n\t\tb.flush(id, true)\n\t}\n}\n\nfunc (b *writeBuffer) flush(id int, final bool) {\n\tb.conn.write(id, b.buf, true, final)\n\tb.buf = b.buf[:HeaderSize]\n}\n","avg_line_length":23.7534246575,"max_line_length":70,"alphanum_fraction":0.6655132641} +{"size":364,"ext":"go","lang":"Go","max_stars_count":2.0,"content":"package tidbclient\n\nimport (\n\t. \"github.com\/journeymidnight\/yig\/meta\/types\"\n)\n\n\/\/cluster\nfunc (t *TidbClient) GetCluster(fsid, pool string) (cluster Cluster, err error) {\n\tsqltext := \"select fsid,pool,weight from cluster where fsid=? 
and pool=?\"\n\terr = t.Client.QueryRow(sqltext, fsid, pool).Scan(\n\t\t&cluster.Fsid,\n\t\t&cluster.Pool,\n\t\t&cluster.Weight,\n\t)\n\treturn\n}\n","avg_line_length":21.4117647059,"max_line_length":81,"alphanum_fraction":0.7115384615} +{"size":1434,"ext":"go","lang":"Go","max_stars_count":1.0,"content":"package nodeMemoryAvailableBytes\n\nimport (\n\tmetric_dao \"github.com\/containers-ai\/alameda\/datahub\/pkg\/dao\/metric\"\n\t\"github.com\/containers-ai\/alameda\/datahub\/pkg\/metric\"\n\t\"github.com\/containers-ai\/alameda\/datahub\/pkg\/repository\/prometheus\"\n)\n\nconst (\n\t\/\/ MetricName Metric name to query from prometheus\n\tMetricName = \"node:node_memory_bytes_available:sum\"\n\t\/\/ NodeLabel Node label name in the metric\n\tNodeLabel = \"node\"\n)\n\n\/\/ Entity Node memory avaliable entity\ntype Entity struct {\n\tPrometheusEntity prometheus.Entity\n\n\tNodeName string\n\tSamples []metric.Sample\n}\n\n\/\/ NewEntityFromPrometheusEntity New entity with field value assigned from prometheus entity\nfunc NewEntityFromPrometheusEntity(e prometheus.Entity) Entity {\n\n\tvar (\n\t\tsamples []metric.Sample\n\t)\n\n\tsamples = make([]metric.Sample, 0)\n\n\tfor _, value := range e.Values {\n\t\tsample := metric.Sample{\n\t\t\tTimestamp: value.UnixTime,\n\t\t\tValue: value.SampleValue,\n\t\t}\n\t\tsamples = append(samples, sample)\n\t}\n\n\treturn Entity{\n\t\tPrometheusEntity: e,\n\t\tNodeName: e.Labels[NodeLabel],\n\t\tSamples: samples,\n\t}\n}\n\n\/\/ NodeMetric Build NodeMetric base on entity properties\nfunc (e *Entity) NodeMetric() metric_dao.NodeMetric {\n\n\tvar (\n\t\tnodeMetric metric_dao.NodeMetric\n\t)\n\n\tnodeMetric = metric_dao.NodeMetric{\n\t\tNodeName: e.NodeName,\n\t\tMetrics: map[metric.NodeMetricType][]metric.Sample{\n\t\t\tmetric.TypeNodeMemoryAvailableBytes: e.Samples,\n\t\t},\n\t}\n\n\treturn nodeMetric\n}\n","avg_line_length":22.40625,"max_line_length":92,"alphanum_fraction":0.7454672245}